Skip to content
This repository was archived by the owner on Mar 23, 2026. It is now read-only.

Commit ed1522f

Browse files
committed
debug: simplify reproducer
1 parent 60bc184 commit ed1522f

3 files changed

Lines changed: 21 additions & 113 deletions

File tree

Original file line numberDiff line numberDiff line change
@@ -1,2 +1,4 @@
11
Args = --add-opens=java.base/java.nio=ALL-UNNAMED \
2-
--initialize-at-run-time=org.apache.arrow.memory.netty.DefaultAllocationManagerFactory
2+
--initialize-at-build-time=org.apache.arrow.memory.DefaultAllocationManagerOption
3+
4+
# --initialize-at-run-time=org.apache.arrow.memory.netty.DefaultAllocationManagerFactory

google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java

Lines changed: 15 additions & 112 deletions
Original file line numberDiff line numberDiff line change
@@ -214,7 +214,22 @@
214214
import org.junit.rules.Timeout;
215215
import org.threeten.extra.PeriodDuration;
216216

217+
import org.apache.arrow.memory.BufferAllocator;
218+
import org.apache.arrow.memory.RootAllocator;
217219
public class ITBigQueryTest {
220+
@Test
221+
public void testArrowForName() throws Exception {
222+
final String clazzName = "org.apache.arrow.memory.netty.DefaultAllocationManagerFactory";
223+
try {
224+
BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
225+
//java.lang.reflect.Field field = Class.forName(clazzName).getDeclaredField("FACTORY");
226+
//field.setAccessible(true);
227+
//System.out.println("OK");
228+
} catch (Exception e) {
229+
throw new RuntimeException("Unable to instantiate Allocation Manager for " + clazzName, e);
230+
}
231+
}
232+
218233

219234
private static final byte[] BYTES = {0xD, 0xE, 0xA, 0xD};
220235
private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES);
@@ -1064,118 +1079,6 @@ public CompletableResultCode shutdown() {
10641079

10651080
@Rule public Timeout globalTimeout = Timeout.seconds(300);
10661081

1067-
@BeforeClass
1068-
public static void beforeClass() throws InterruptedException, IOException {
1069-
RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
1070-
RemoteStorageHelper storageHelper = RemoteStorageHelper.create();
1071-
Map<String, String> labels = ImmutableMap.of("test-job-name", "test-load-job");
1072-
SdkTracerProvider tracerProvider =
1073-
SdkTracerProvider.builder()
1074-
.addSpanProcessor(SimpleSpanProcessor.create(new TestSpanExporter()))
1075-
.setSampler(Sampler.alwaysOn())
1076-
.build();
1077-
otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).buildAndRegisterGlobal();
1078-
1079-
bigquery = bigqueryHelper.getOptions().getService();
1080-
storage = storageHelper.getOptions().getService();
1081-
storage.create(BucketInfo.of(BUCKET));
1082-
storage.create(
1083-
BlobInfo.newBuilder(BUCKET, LOAD_FILE).setContentType("text/plain").build(),
1084-
CSV_CONTENT.getBytes(StandardCharsets.UTF_8));
1085-
storage.create(
1086-
BlobInfo.newBuilder(BUCKET, LOAD_FILE_NULL).setContentType("text/plain").build(),
1087-
CSV_CONTENT_NULL.getBytes(StandardCharsets.UTF_8));
1088-
storage.create(
1089-
BlobInfo.newBuilder(BUCKET, LOAD_FILE_FLEXIBLE_COLUMN_NAME)
1090-
.setContentType("text/plain")
1091-
.build(),
1092-
CSV_CONTENT_FLEXIBLE_COLUMN.getBytes(StandardCharsets.UTF_8));
1093-
storage.create(
1094-
BlobInfo.newBuilder(BUCKET, JSON_LOAD_FILE).setContentType("application/json").build(),
1095-
JSON_CONTENT.getBytes(StandardCharsets.UTF_8));
1096-
storage.create(
1097-
BlobInfo.newBuilder(BUCKET, JSON_LOAD_FILE_SIMPLE)
1098-
.setContentType("application/json")
1099-
.build(),
1100-
JSON_CONTENT_SIMPLE.getBytes(StandardCharsets.UTF_8));
1101-
InputStream stream =
1102-
ITBigQueryTest.class.getClassLoader().getResourceAsStream("QueryTestData.csv");
1103-
storage.createFrom(
1104-
BlobInfo.newBuilder(BUCKET, LOAD_FILE_LARGE).setContentType("text/plain").build(), stream);
1105-
storage.create(
1106-
BlobInfo.newBuilder(BUCKET, JSON_LOAD_FILE_BQ_RESULTSET)
1107-
.setContentType("application/json")
1108-
.build(),
1109-
JSON_CONTENT_BQ_RESULTSET.getBytes(StandardCharsets.UTF_8));
1110-
DatasetInfo info =
1111-
DatasetInfo.newBuilder(DATASET).setDescription(DESCRIPTION).setLabels(LABELS).build();
1112-
bigquery.create(info);
1113-
DatasetInfo info2 =
1114-
DatasetInfo.newBuilder(MODEL_DATASET).setDescription("java model lifecycle").build();
1115-
bigquery.create(info2);
1116-
DatasetInfo info3 =
1117-
DatasetInfo.newBuilder(ROUTINE_DATASET).setDescription("java routine lifecycle").build();
1118-
bigquery.create(info3);
1119-
1120-
LoadJobConfiguration configuration =
1121-
LoadJobConfiguration.newBuilder(
1122-
TABLE_ID, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json())
1123-
.setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
1124-
.setSchema(TABLE_SCHEMA)
1125-
.setLabels(labels)
1126-
.build();
1127-
Job job = bigquery.create(JobInfo.of(configuration));
1128-
job = job.waitFor();
1129-
assertNull(job.getStatus().getError());
1130-
LoadJobConfiguration loadJobConfiguration = job.getConfiguration();
1131-
assertEquals(labels, loadJobConfiguration.getLabels());
1132-
1133-
LoadJobConfiguration configurationFastQuery =
1134-
LoadJobConfiguration.newBuilder(
1135-
TABLE_ID_FASTQUERY, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json())
1136-
.setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
1137-
.setSchema(TABLE_SCHEMA)
1138-
.setLabels(labels)
1139-
.build();
1140-
Job jobFastQuery = bigquery.create(JobInfo.of(configurationFastQuery));
1141-
jobFastQuery = jobFastQuery.waitFor();
1142-
assertNull(jobFastQuery.getStatus().getError());
1143-
1144-
LoadJobConfiguration configFastQueryBQResultset =
1145-
LoadJobConfiguration.newBuilder(
1146-
TABLE_ID_FASTQUERY_BQ_RESULTSET,
1147-
"gs://" + BUCKET + "/" + JSON_LOAD_FILE_BQ_RESULTSET,
1148-
FormatOptions.json())
1149-
.setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
1150-
.setSchema(BQ_RESULTSET_SCHEMA)
1151-
.setLabels(labels)
1152-
.build();
1153-
Job jobFastQueryBQResultSet = bigquery.create(JobInfo.of(configFastQueryBQResultset));
1154-
jobFastQueryBQResultSet = jobFastQueryBQResultSet.waitFor();
1155-
assertNull(jobFastQueryBQResultSet.getStatus().getError());
1156-
1157-
LoadJobConfiguration configurationDDL =
1158-
LoadJobConfiguration.newBuilder(
1159-
TABLE_ID_DDL, "gs://" + BUCKET + "/" + JSON_LOAD_FILE_SIMPLE, FormatOptions.json())
1160-
.setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
1161-
.setSchema(DDL_TABLE_SCHEMA)
1162-
.setLabels(labels)
1163-
.build();
1164-
Job jobDDL = bigquery.create(JobInfo.of(configurationDDL));
1165-
jobDDL = jobDDL.waitFor();
1166-
assertNull(jobDDL.getStatus().getError());
1167-
1168-
LoadJobConfiguration configurationLargeTable =
1169-
LoadJobConfiguration.newBuilder(
1170-
TABLE_ID_LARGE, "gs://" + BUCKET + "/" + LOAD_FILE_LARGE, FormatOptions.csv())
1171-
.setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
1172-
.setSchema(LARGE_TABLE_SCHEMA)
1173-
.setLabels(labels)
1174-
.build();
1175-
Job jobLargeTable = bigquery.create(JobInfo.of(configurationLargeTable));
1176-
jobLargeTable = jobLargeTable.waitFor();
1177-
assertNull(jobLargeTable.getStatus().getError());
1178-
}
11791082

11801083
@AfterClass
11811084
public static void afterClass() throws ExecutionException, InterruptedException {
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
Args = --initialize-at-build-time=org.junit
2+
#Args = --initialize-at-build-time=org.mockito \
3+
#--initialize-at-build-time=net.bytebuddy.TypeCache$WithInlineExpunction

0 commit comments

Comments (0)