Skip to content
This repository was archived by the owner on Nov 11, 2022. It is now read-only.

Commit b76e48c

Browse files
authored
checkstyle: enforce static import from Preconditions (#455)
A spiritual backport of apache/beam#1030
1 parent 3212b63 commit b76e48c

7 files changed

Lines changed: 49 additions & 48 deletions

File tree

checkstyle.xml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -141,6 +141,12 @@ page at http://checkstyle.sourceforge.net/config.html -->
141141
<property name="format" value="com\.google\.api\.client\.util\.(ByteStreams|Charsets|Collections2|Joiner|Lists|Maps|Objects|Preconditions|Sets|Strings|Throwables)"/>
142142
</module>
143143

144+
<!-- Require static importing from Preconditions. -->
145+
<module name="RegexpSinglelineJava">
146+
<property name="format" value="^import com.google.common.base.Preconditions;$"/>
147+
<property name="message" value="Static import functions from Guava Preconditions"/>
148+
</module>
149+
144150
<module name="UnusedImports">
145151
<property name="severity" value="error"/>
146152
<property name="processJavadoc" value="true"/>

contrib/firebaseio/src/main/java/com/google/cloud/dataflow/contrib/firebase/io/FirebaseSource.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,8 @@
1515
*/
1616
package com.google.cloud.dataflow.contrib.firebase.io;
1717

18+
import static com.google.common.base.Preconditions.checkNotNull;
19+
1820
import com.google.cloud.dataflow.contrib.firebase.events.ChildAdded;
1921
import com.google.cloud.dataflow.contrib.firebase.events.ChildChanged;
2022
import com.google.cloud.dataflow.contrib.firebase.events.ChildMoved;
@@ -33,8 +35,6 @@
3335
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
3436
import com.google.cloud.dataflow.sdk.values.TypeDescriptor;
3537

36-
import com.google.common.base.Preconditions;
37-
3838
import com.fasterxml.jackson.databind.ObjectWriter;
3939
import com.firebase.client.AuthData;
4040
import com.firebase.client.ChildEventListener;
@@ -389,7 +389,7 @@ public void validate() {
389389
Thread.currentThread().interrupt();
390390
throw new RuntimeException("Thread interrupted while waiting for authentication to complete", e);
391391
}
392-
Preconditions.checkNotNull(result, "If result is null, authenticators is ");
392+
checkNotNull(result, "If result is null, authenticators is ");
393393
}
394394

395395
@Override

contrib/hadoop/src/main/java/com/google/cloud/dataflow/contrib/hadoop/HadoopFileSource.java

Lines changed: 8 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,9 @@
1616

1717
package com.google.cloud.dataflow.contrib.hadoop;
1818

19+
import static com.google.common.base.Preconditions.checkArgument;
20+
import static com.google.common.base.Preconditions.checkNotNull;
21+
1922
import com.google.cloud.dataflow.sdk.coders.Coder;
2023
import com.google.cloud.dataflow.sdk.coders.KvCoder;
2124
import com.google.cloud.dataflow.sdk.coders.VoidCoder;
@@ -24,7 +27,6 @@
2427
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
2528
import com.google.cloud.dataflow.sdk.values.KV;
2629
import com.google.common.base.Function;
27-
import com.google.common.base.Preconditions;
2830
import com.google.common.collect.ImmutableList;
2931
import com.google.common.collect.Lists;
3032
import org.apache.hadoop.conf.Configuration;
@@ -164,14 +166,10 @@ public Class<V> getValueClass() {
164166

165167
@Override
166168
public void validate() {
167-
Preconditions.checkNotNull(filepattern,
168-
"need to set the filepattern of a HadoopFileSource");
169-
Preconditions.checkNotNull(formatClass,
170-
"need to set the format class of a HadoopFileSource");
171-
Preconditions.checkNotNull(keyClass,
172-
"need to set the key class of a HadoopFileSource");
173-
Preconditions.checkNotNull(valueClass,
174-
"need to set the value class of a HadoopFileSource");
169+
checkNotNull(filepattern, "need to set the filepattern of a HadoopFileSource");
170+
checkNotNull(formatClass, "need to set the format class of a HadoopFileSource");
171+
checkNotNull(keyClass, "need to set the key class of a HadoopFileSource");
172+
checkNotNull(valueClass, "need to set the value class of a HadoopFileSource");
175173
}
176174

177175
@Override
@@ -466,8 +464,7 @@ public SerializableSplit() {
466464
}
467465

468466
public SerializableSplit(InputSplit split) {
469-
Preconditions.checkArgument(split instanceof Writable, "Split is not writable: "
470-
+ split);
467+
checkArgument(split instanceof Writable, "Split is not writable: %s", split);
471468
this.split = split;
472469
}
473470

contrib/join-library/src/main/java/com/google/cloud/dataflow/contrib/joinlibrary/Join.java

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@
1616

1717
package com.google.cloud.dataflow.contrib.joinlibrary;
1818

19+
import static com.google.common.base.Preconditions.checkNotNull;
20+
1921
import com.google.cloud.dataflow.sdk.coders.KvCoder;
2022
import com.google.cloud.dataflow.sdk.transforms.DoFn;
2123
import com.google.cloud.dataflow.sdk.transforms.ParDo;
@@ -25,7 +27,6 @@
2527
import com.google.cloud.dataflow.sdk.values.KV;
2628
import com.google.cloud.dataflow.sdk.values.PCollection;
2729
import com.google.cloud.dataflow.sdk.values.TupleTag;
28-
import com.google.common.base.Preconditions;
2930

3031
/**
3132
* Utility class with different versions of joins. All methods join two collections of
@@ -45,8 +46,8 @@ public class Join {
4546
*/
4647
public static <K, V1, V2> PCollection<KV<K, KV<V1, V2>>> innerJoin(
4748
final PCollection<KV<K, V1>> leftCollection, final PCollection<KV<K, V2>> rightCollection) {
48-
Preconditions.checkNotNull(leftCollection);
49-
Preconditions.checkNotNull(rightCollection);
49+
checkNotNull(leftCollection);
50+
checkNotNull(rightCollection);
5051

5152
final TupleTag<V1> v1Tuple = new TupleTag<>();
5253
final TupleTag<V2> v2Tuple = new TupleTag<>();
@@ -93,9 +94,9 @@ public static <K, V1, V2> PCollection<KV<K, KV<V1, V2>>> leftOuterJoin(
9394
final PCollection<KV<K, V1>> leftCollection,
9495
final PCollection<KV<K, V2>> rightCollection,
9596
final V2 nullValue) {
96-
Preconditions.checkNotNull(leftCollection);
97-
Preconditions.checkNotNull(rightCollection);
98-
Preconditions.checkNotNull(nullValue);
97+
checkNotNull(leftCollection);
98+
checkNotNull(rightCollection);
99+
checkNotNull(nullValue);
99100

100101
final TupleTag<V1> v1Tuple = new TupleTag<>();
101102
final TupleTag<V2> v2Tuple = new TupleTag<>();
@@ -146,9 +147,9 @@ public static <K, V1, V2> PCollection<KV<K, KV<V1, V2>>> rightOuterJoin(
146147
final PCollection<KV<K, V1>> leftCollection,
147148
final PCollection<KV<K, V2>> rightCollection,
148149
final V1 nullValue) {
149-
Preconditions.checkNotNull(leftCollection);
150-
Preconditions.checkNotNull(rightCollection);
151-
Preconditions.checkNotNull(nullValue);
150+
checkNotNull(leftCollection);
151+
checkNotNull(rightCollection);
152+
checkNotNull(nullValue);
152153

153154
final TupleTag<V1> v1Tuple = new TupleTag<>();
154155
final TupleTag<V2> v2Tuple = new TupleTag<>();

sdk/src/main/java/com/google/cloud/dataflow/sdk/io/CompressedSource.java

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,17 +16,17 @@
1616

1717
package com.google.cloud.dataflow.sdk.io;
1818

19+
import static com.google.common.base.Preconditions.checkArgument;
20+
import static com.google.common.base.Preconditions.checkNotNull;
21+
1922
import com.google.cloud.dataflow.sdk.annotations.Experimental;
2023
import com.google.cloud.dataflow.sdk.coders.Coder;
2124
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
2225
import com.google.cloud.dataflow.sdk.transforms.display.DisplayData;
23-
import com.google.common.base.Preconditions;
2426
import com.google.common.io.ByteStreams;
2527
import com.google.common.primitives.Ints;
26-
2728
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
2829
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
29-
3030
import java.io.IOException;
3131
import java.io.PushbackInputStream;
3232
import java.io.Serializable;
@@ -35,7 +35,6 @@
3535
import java.nio.channels.ReadableByteChannel;
3636
import java.util.NoSuchElementException;
3737
import java.util.zip.GZIPInputStream;
38-
3938
import javax.annotation.concurrent.GuardedBy;
4039

4140
/**
@@ -253,7 +252,7 @@ private CompressedSource(FileBasedSource<T> sourceDelegate,
253252
this.sourceDelegate = sourceDelegate;
254253
this.channelFactory = channelFactory;
255254
try {
256-
Preconditions.checkArgument(
255+
checkArgument(
257256
isSplittable() || startOffset == 0,
258257
"CompressedSources must start reading at offset 0. Requested offset: " + startOffset);
259258
} catch (Exception e) {
@@ -268,9 +267,9 @@ private CompressedSource(FileBasedSource<T> sourceDelegate,
268267
@Override
269268
public void validate() {
270269
super.validate();
271-
Preconditions.checkNotNull(sourceDelegate);
270+
checkNotNull(sourceDelegate);
272271
sourceDelegate.validate();
273-
Preconditions.checkNotNull(channelFactory);
272+
checkNotNull(channelFactory);
274273
}
275274

276275
/**

sdk/src/main/java/com/google/cloud/dataflow/sdk/io/FileBasedSource.java

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -14,22 +14,22 @@
1414

1515
package com.google.cloud.dataflow.sdk.io;
1616

17+
import static com.google.common.base.Preconditions.checkArgument;
18+
import static com.google.common.base.Preconditions.checkState;
19+
1720
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
1821
import com.google.cloud.dataflow.sdk.transforms.display.DisplayData;
1922
import com.google.cloud.dataflow.sdk.util.IOChannelFactory;
2023
import com.google.cloud.dataflow.sdk.util.IOChannelUtils;
21-
import com.google.common.base.Preconditions;
2224
import com.google.common.collect.ImmutableList;
2325
import com.google.common.collect.Iterables;
2426
import com.google.common.util.concurrent.Futures;
2527
import com.google.common.util.concurrent.ListenableFuture;
2628
import com.google.common.util.concurrent.ListeningExecutorService;
2729
import com.google.common.util.concurrent.MoreExecutors;
28-
2930
import org.joda.time.Instant;
3031
import org.slf4j.Logger;
3132
import org.slf4j.LoggerFactory;
32-
3333
import java.io.IOException;
3434
import java.nio.channels.ReadableByteChannel;
3535
import java.nio.channels.SeekableByteChannel;
@@ -133,18 +133,18 @@ public final Mode getMode() {
133133

134134
@Override
135135
public final FileBasedSource<T> createSourceForSubrange(long start, long end) {
136-
Preconditions.checkArgument(mode != Mode.FILEPATTERN,
136+
checkArgument(mode != Mode.FILEPATTERN,
137137
"Cannot split a file pattern based source based on positions");
138-
Preconditions.checkArgument(start >= getStartOffset(), "Start offset value " + start
138+
checkArgument(start >= getStartOffset(), "Start offset value " + start
139139
+ " of the subrange cannot be smaller than the start offset value " + getStartOffset()
140140
+ " of the parent source");
141-
Preconditions.checkArgument(end <= getEndOffset(), "End offset value " + end
141+
checkArgument(end <= getEndOffset(), "End offset value " + end
142142
+ " of the subrange cannot be larger than the end offset value " + getEndOffset()
143143
+ " of the parent source");
144144

145145
FileBasedSource<T> source = createForSubrangeOfFile(fileOrPatternSpec, start, end);
146146
if (start > 0 || end != Long.MAX_VALUE) {
147-
Preconditions.checkArgument(source.getMode() == Mode.SINGLE_FILE_OR_SUBRANGE,
147+
checkArgument(source.getMode() == Mode.SINGLE_FILE_OR_SUBRANGE,
148148
"Source created for the range [" + start + "," + end + ")"
149149
+ " must be a subrange source");
150150
}
@@ -386,10 +386,10 @@ public void validate() {
386386
super.validate();
387387
switch (mode) {
388388
case FILEPATTERN:
389-
Preconditions.checkArgument(getStartOffset() == 0,
389+
checkArgument(getStartOffset() == 0,
390390
"FileBasedSource is based on a file pattern or a full single file "
391391
+ "but the starting offset proposed " + getStartOffset() + " is not zero");
392-
Preconditions.checkArgument(getEndOffset() == Long.MAX_VALUE,
392+
checkArgument(getEndOffset() == Long.MAX_VALUE,
393393
"FileBasedSource is based on a file pattern or a full single file "
394394
+ "but the ending offset proposed " + getEndOffset() + " is not Long.MAX_VALUE");
395395
break;
@@ -468,7 +468,7 @@ public abstract static class FileBasedReader<T> extends OffsetBasedReader<T> {
468468
*/
469469
public FileBasedReader(FileBasedSource<T> source) {
470470
super(source);
471-
Preconditions.checkArgument(source.getMode() != Mode.FILEPATTERN,
471+
checkArgument(source.getMode() != Mode.FILEPATTERN,
472472
"FileBasedReader does not support reading file patterns");
473473
}
474474

@@ -488,10 +488,10 @@ protected final boolean startImpl() throws IOException {
488488
seekChannel.position(source.getStartOffset());
489489
} else {
490490
// Channel is not seekable. Must not be a subrange.
491-
Preconditions.checkArgument(source.mode != Mode.SINGLE_FILE_OR_SUBRANGE,
491+
checkArgument(source.mode != Mode.SINGLE_FILE_OR_SUBRANGE,
492492
"Subrange-based sources must only be defined for file types that support seekable "
493493
+ " read channels");
494-
Preconditions.checkArgument(source.getStartOffset() == 0, "Start offset "
494+
checkArgument(source.getStartOffset() == 0, "Start offset "
495495
+ source.getStartOffset()
496496
+ " is not zero but channel for reading the file is not seekable.");
497497
}
@@ -574,7 +574,7 @@ public boolean start() throws IOException {
574574

575575
@Override
576576
public boolean advance() throws IOException {
577-
Preconditions.checkState(currentReader != null, "Call start() before advance()");
577+
checkState(currentReader != null, "Call start() before advance()");
578578
if (currentReader.advance()) {
579579
return true;
580580
}

sdk/src/main/java/com/google/cloud/dataflow/sdk/io/TextIO.java

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616

1717
package com.google.cloud.dataflow.sdk.io;
1818

19+
import static com.google.common.base.Preconditions.checkArgument;
1920
import static com.google.common.base.Preconditions.checkState;
2021

2122
import com.google.cloud.dataflow.sdk.coders.Coder;
@@ -34,9 +35,7 @@
3435
import com.google.cloud.dataflow.sdk.values.PDone;
3536
import com.google.cloud.dataflow.sdk.values.PInput;
3637
import com.google.common.annotations.VisibleForTesting;
37-
import com.google.common.base.Preconditions;
3838
import com.google.protobuf.ByteString;
39-
4039
import java.io.IOException;
4140
import java.io.OutputStream;
4241
import java.nio.ByteBuffer;
@@ -47,7 +46,6 @@
4746
import java.nio.charset.StandardCharsets;
4847
import java.util.NoSuchElementException;
4948
import java.util.regex.Pattern;
50-
5149
import javax.annotation.Nullable;
5250

5351
/**
@@ -598,7 +596,7 @@ public Bound<T> withSuffix(String nameExtension) {
598596
* @see ShardNameTemplate
599597
*/
600598
public Bound<T> withNumShards(int numShards) {
601-
Preconditions.checkArgument(numShards >= 0);
599+
checkArgument(numShards >= 0);
602600
return new Bound<>(name, filenamePrefix, filenameSuffix, header, footer, coder, numShards,
603601
shardTemplate, validate);
604602
}
@@ -821,7 +819,7 @@ public boolean matches(String filename) {
821819
private static final Pattern SHARD_OUTPUT_PATTERN = Pattern.compile("@([0-9]+|\\*)");
822820

823821
private static void validateOutputComponent(String partialFilePattern) {
824-
Preconditions.checkArgument(
822+
checkArgument(
825823
!SHARD_OUTPUT_PATTERN.matcher(partialFilePattern).find(),
826824
"Output name components are not allowed to contain @* or @N patterns: "
827825
+ partialFilePattern);

0 commit comments

Comments (0)