|
311 | 311 | }, |
312 | 312 | "outputs": [], |
313 | 313 | "source": [ |
314 | | - "# Create an analysis object over the java application\n", |
| 314 | + "# Create an analysis object for the Java application\n", |
315 | 315 | "analysis = cldk.analysis(project_path=\"/tmp/commons-cli-rel-commons-cli-1.7.0\", analysis_level=AnalysisLevel.symbol_table)" |
316 | 316 | ] |
317 | 317 | }, |
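
For context, the `cldk` handle and `AnalysisLevel` used in the cell above come from earlier setup cells that this commit does not touch. A minimal sketch of that setup, assuming the codellm-devkit package layout (the import paths are an assumption; only the `cldk.analysis(...)` call itself appears in the diff):

```python
# Assumed setup for the analysis cell above; import paths follow the
# codellm-devkit (CLDK) tutorials and are not part of this commit.
from cldk import CLDK
from cldk.analysis import AnalysisLevel

# Initialize CLDK for Java source analysis
cldk = CLDK(language="java")

# Build a symbol-table-level analysis over the extracted commons-cli 1.7.0 sources
analysis = cldk.analysis(
    project_path="/tmp/commons-cli-rel-commons-cli-1.7.0",
    analysis_level=AnalysisLevel.symbol_table,
)
```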
|
356 | 356 | }, |
357 | 357 | "outputs": [], |
358 | 358 | "source": [ |
359 | | - "# For simplicity, we run the code summarization for a single class and method (this filter can be removed to run this code over the entire application)\n", |
| 359 | + "# For simplicity, we run the code summarization on a single class and method (this filter can be removed to run this code over the entire application)\n", |
360 | 360 | "target_class = \"org.apache.commons.cli.GnuParser\"\n", |
361 | 361 | "target_method = \"flatten(Options, String[], boolean)\"\n", |
362 | 362 | "\n", |
|
366 | 366 | " class_file_path = analysis.get_java_file(qualified_class_name=class_name)\n", |
367 | 367 | "\n", |
368 | 368 | " # Read code for the class\n", |
369 | | - " with open(class_file_path, 'r') as f:\n", |
| 369 | + " with open(class_file_path, \"r\") as f:\n", |
370 | 370 | " code_body = f.read()\n", |
371 | 371 | "\n", |
372 | 372 | " # Initialize treesitter utils for the class file content\n", |
|
387 | 387 | " instruction = format_inst(\n", |
388 | 388 | " code=sanitized_class,\n", |
389 | 389 | " focal_method=method_details.declaration,\n", |
390 | | - " focal_class=class_name.split('.')[-1],\n", |
| 390 | + " focal_class=class_name.split(\".\")[-1],\n", |
391 | 391 | " language=\"java\"\n", |
392 | 392 | " )\n", |
393 | 393 | " \n", |
394 | 394 | " print(f\"Instruction:\\n{instruction}\\n\")\n", |
395 | | - " print(f\"Generating code summary . . .\\n\")\n", |
| 395 | + " print(f\"Generating code summary ...\\n\")\n", |
396 | 396 | " \n", |
397 | 397 | " # Prompt the local model on Ollama\n", |
398 | | - " llm_output = prompt_ollama(\n", |
399 | | - " message=instruction\n", |
400 | | - " )\n", |
| 398 | + " llm_output = prompt_ollama(message=instruction)\n", |
401 | 399 | " \n", |
402 | 400 | " # Print the LLM output\n", |
403 | 401 | " print(f\"LLM Output:\\n{llm_output}\")" |
|
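Similarly, `prompt_ollama` is a notebook-level helper rather than part of this diff. A minimal sketch using the `ollama` Python client, assuming a locally running Ollama server (the default model name here is an assumption):

```python
import ollama

def prompt_ollama(message: str, model_id: str = "granite-code:8b") -> str:
    # Single-turn chat with a local Ollama model; "granite-code:8b" is an
    # assumed default, so swap in any model you have pulled locally.
    response = ollama.chat(
        model=model_id,
        messages=[{"role": "user", "content": message}],
    )
    return response["message"]["content"]
```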