Skip to content

Commit cb95ef9

Browse files
author
Lingling Peng
committed
make upload_file_handle_id required; add default to import_csv_async
1 parent 90eb585 commit cb95ef9

1 file changed

Lines changed: 13 additions & 11 deletions

File tree

synapseclient/models/curation.py

Lines changed: 13 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1077,19 +1077,19 @@ class UploadToTablePreviewRequest(AsynchronousCommunicator):
10771077
This response is modeled from: <https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/table/UploadToTablePreviewResult.html>
10781078
"""
10791079

1080+
upload_file_handle_id: str
1081+
"""The ID of the file handle for a type of UPLOAD"""
1082+
10801083
concrete_type: str = UPLOAD_TO_TABLE_PREVIEW_REQUEST
10811084
"""The concrete type for this request."""
10821085

1083-
upload_file_handle_id: Optional[str] = None
1084-
"""The ID of the file handle for a type of UPLOAD"""
1085-
10861086
lines_to_skip: Optional[int] = None
10871087
"""The number of lines to skip from the start of the file. The default value of 0 will be used if this is not provided by the caller."""
10881088

10891089
csv_table_descriptor: CsvTableDescriptor = field(default_factory=CsvTableDescriptor)
10901090
"""The description of a csv for upload or download."""
10911091

1092-
do_full_file_scan: Optional[bool] = False
1092+
do_full_file_scan: Optional[bool] = None
10931093
"""When set to true the full file will be scanned for a schema suggestions. A full scan is more accurate but can take more time. When set to false only a sub-set of the first rows will be scanned, which can be faster but is less accurate. The default value is false."""
10941094

10951095
# Response fields (populated by fill_from_dict)
@@ -1141,11 +1141,8 @@ def to_synapse_request(self) -> Dict[str, Any]:
11411141
"uploadFileHandleId": self.upload_file_handle_id,
11421142
"linesToSkip": self.lines_to_skip,
11431143
"doFullFileScan": self.do_full_file_scan,
1144+
"csvTableDescriptor": self.csv_table_descriptor.to_synapse_request(),
11441145
}
1145-
if self.csv_table_descriptor is not None:
1146-
request_dict["csvTableDescriptor"] = (
1147-
self.csv_table_descriptor.to_synapse_request()
1148-
)
11491146
delete_none_keys(request_dict)
11501147
return request_dict
11511148

@@ -2050,7 +2047,7 @@ async def main():
20502047
async def import_csv_async(
20512048
self,
20522049
file_handle_id: str,
2053-
csv_table_descriptor: Optional[CsvTableDescriptor] = None,
2050+
csv_table_descriptor: Optional[CsvTableDescriptor] = CsvTableDescriptor(),
20542051
*,
20552052
timeout: int = 120,
20562053
synapse_client: Optional[Synapse] = None,
@@ -2102,6 +2099,12 @@ async def main():
21022099
"session_id is required to import a CSV into a GridSession"
21032100
)
21042101

2102+
trace.get_current_span().set_attributes(
2103+
{
2104+
"synapse.session_id": self.session_id or "",
2105+
}
2106+
)
2107+
21052108
upload_to_table_preview = UploadToTablePreviewRequest(
21062109
csv_table_descriptor=csv_table_descriptor,
21072110
upload_file_handle_id=file_handle_id,
@@ -2116,9 +2119,8 @@ async def main():
21162119
session_id=self.session_id,
21172120
file_handle_id=file_handle_id,
21182121
schema=all_columns,
2122+
csv_descriptor=csv_table_descriptor,
21192123
)
2120-
if csv_table_descriptor:
2121-
import_request.csv_descriptor = csv_table_descriptor
21222124
import_response = await import_request.send_job_and_wait_async(
21232125
timeout=timeout, synapse_client=synapse_client
21242126
)

0 commit comments

Comments (0)