Skip to content

Commit

Permalink
Merge branch 'swebench_di' of github.com:stellaHSR/MetaGPT into swebench_di

Browse files Browse the repository at this point in the history
  • Loading branch information
stellaHSR committed Mar 22, 2024
2 parents 6f92e70 + c8a5110 commit 1e6d145
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 16 deletions.
14 changes: 0 additions & 14 deletions data/inference/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,20 +45,6 @@ def read_sub_set_instance(path=SUBSET_DATASET, tag="scikit-learn"):
fail_filters = df["instance_id_fail"].tolist()
pass_filters = [s for s in pass_filters if tag in s]
fail_filters = [s for s in fail_filters if tag in s]
print(pass_filters)
print(fail_filters)
# Filter for instances containing the tag in either column
# pass_filter = df["instance_id_pass"].str.contains(tag, na=False)
# fail_filter = df["instance_id_fail"].str.contains(tag, na=False)

# Combine the filters using | (OR operator) for efficiency
# combined_filter = pass_filters | fail_filters

# print(df[combined_filter])
# Apply combined filter and select the specific columns
# filtered_df = df[combined_filter][["instance_id_pass", "instance_id_fail"]]

# Flatten the DataFrame into a list and remove NaN values
subset_instance = pass_filters + fail_filters

return subset_instance
Expand Down
2 changes: 0 additions & 2 deletions swe_bench/inference/run_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,6 @@ async def openai_inference(
logger.info(f"{repo_prefix}_{version}")
data.append(f"{repo_prefix}_{version}")

# import pdb;pdb.set_trace()
response = await run_instance(instance=datum)
if response is None:
continue
Expand All @@ -65,7 +64,6 @@ async def openai_inference(
output_dict["full_output"] = response
output_dict["model_patch"] = extract_diff(response)
print(json.dumps(output_dict), file=f, flush=True)
# print(data)


async def main(
Expand Down

0 comments on commit 1e6d145

Please sign in to comment.