Cleanup #37

Merged: 13 commits, May 27, 2024
2 changes: 1 addition & 1 deletion .github/workflows/black.yml
@@ -64,4 +64,4 @@ jobs:

- name: Format with black
run: |
poetry run black 'p6' --check
poetry run black nfopt --check
2 changes: 1 addition & 1 deletion README.md
@@ -66,7 +66,7 @@ The optimizer can generate a baseline using uniform path splitting ratios for tr
### Data Format
Before you can start using the tool, ensure the following steps are completed:

1. **Data Formatting:** Make sure your data is formatted according to the examples provided in the `example-data` directory.
1. **Data Formatting:** Make sure your data is formatted according to the examples provided in the `sample-data` directory.
2. **Environment Variables:** Set the environment variable for the data directory correctly in the `variables.env` file. You can use the `variables.env-example` file as a reference for the required environment variables and their values. Rename `variables.env-example` to `.env` and update it with the appropriate information.

### Usage
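As context for step 2 of the README excerpt above, here is a minimal sketch of what the `variables.env` file might contain. Only variables that actually appear elsewhere in this diff are listed (`NETFLOW_FLOW_THRESHOLD`, `NETFLOW_PATHS_THRESHOLD`, the Gurobi WLS credentials, and the dataset variables read in `nfopt/utils/data.py`); the values shown mirror the constants this PR replaces, and any further keys are assumptions:

```
# Hypothetical values -- adjust to your setup; the real file may define more keys.
DATASET_PATH=./sample-data
DATASET_LINKS_NAME=links
NETFLOW_FLOW_THRESHOLD=0.2
NETFLOW_PATHS_THRESHOLD=0.999
WLSACCESSID=<your-gurobi-wls-access-id>
WLSSECRET=<your-gurobi-wls-secret>
```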
10 changes: 0 additions & 10 deletions depth_stats.csv

This file was deleted.

File renamed without changes.
File renamed without changes.
@@ -4,13 +4,15 @@
from collections import deque

from dotenv import load_dotenv
from p6.utils import log
from p6.utils import data as dataUtils

from nfopt.utils import log
from nfopt.utils import data as dataUtils

logger = log.setupCustomLogger(__name__)
load_dotenv("variables.env")

NETFLOW_FLOW_THRESHOLD = float(os.getenv("NETFLOW_FLOW_THRESHOLD"))
NETFLOW_PATHS_THRESHOLD = float(os.getenv("NETFLOW_PATHS_THRESHOLD"))

options = {
"WLSACCESSID": os.getenv("WLSACCESSID"),
"WLSSECRET": os.getenv("WLSSECRET"),
@@ -19,9 +21,24 @@


def optMC(parserArgs, links, flowTraffic, timestamp):
with gp.Env(params=options) as env, gp.Model(env=env) as m:
m = gp.Model("netflow", env=env)
"""
Runs multi-commodity flow problem optimization on the data using Gurobi. Writes a file with the new paths and their ratios.

### Parameters:
----------
#### parserArgs: argparse.Namespace
The parser arguments.

#### links: dict
The links in the network, indexed by linkName.

#### flowTraffic: dict
The traffic for each source-destination pair.

#### timestamp: string
The timestamp for the current data.
"""
with gp.Env(params=options) as env, gp.Model("netflow", env=env) as m:
nodes = []
edges = []
for link in links:
@@ -37,9 +54,11 @@ def optMC(parserArgs, links, flowTraffic, timestamp):
sorted_flowTraffic = sorted(
flowTraffic.items(), key=lambda item: item[1], reverse=True
)

total_demand = sum(flowTraffic.values())
percentage = 0.2 # TODO: Add this as a parameter
percentage = NETFLOW_FLOW_THRESHOLD
demand_threshold = total_demand * percentage

cumulative_demand = 0
significant_flowTraffic = {}
for flow, value in sorted_flowTraffic:
@@ -114,7 +133,7 @@ def optMC(parserArgs, links, flowTraffic, timestamp):
m.optimize()

# Define the threshold percentage (e.g., 10%)
threshold_percentage = 0.001
threshold_percentage = 1 - NETFLOW_PATHS_THRESHOLD

if m.Status == gp.GRB.OPTIMAL:
solution = m.getAttr("X", flowVars)
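The flow-filtering change above is easier to see in isolation: flows are sorted by traffic in descending order and kept until their cumulative demand reaches `NETFLOW_FLOW_THRESHOLD` times the total demand. A self-contained sketch follows; the exact stopping rule for the flow that crosses the threshold is hidden in the collapsed part of the diff, so the `break` below is one plausible reading:

```python
def filter_significant_flows(flow_traffic: dict, threshold: float) -> dict:
    """Keep the largest flows until their cumulative demand reaches
    threshold * total demand (mirrors the NETFLOW_FLOW_THRESHOLD logic)."""
    total_demand = sum(flow_traffic.values())
    demand_threshold = total_demand * threshold

    significant = {}
    cumulative_demand = 0
    for flow, value in sorted(
        flow_traffic.items(), key=lambda item: item[1], reverse=True
    ):
        if cumulative_demand >= demand_threshold:
            break
        significant[flow] = value
        cumulative_demand += value
    return significant


# Hypothetical traffic matrix: the two largest flows cover 90% of demand.
flows = {("A", "B"): 700, ("A", "C"): 200, ("B", "C"): 100}
print(filter_significant_flows(flows, 0.9))  # {('A', 'B'): 700, ('A', 'C'): 200}
```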
@@ -6,9 +6,9 @@
from gurobipy import GRB
from dotenv import load_dotenv

from p6.calc_type_enum import CalcType
from p6.utils import log
from p6.utils import data as dataUtils
from nfopt.calc_type_enum import CalcType
from nfopt.utils import log
from nfopt.utils import data as dataUtils

logger = log.setupCustomLogger(__name__)

@@ -52,10 +52,7 @@ def runLinearOptimizationModel(
logger.info("Started running linear optimization model...")
model = parserArgs.model_type

with gp.Env(params=options) as env, gp.Model(env=env) as m:
# Create optimization model based on the input model
m = gp.Model("network_optimization", env=env)

with gp.Env(params=options) as env, gp.Model("network_optimization", env=env) as m:
# Decision variables for path ratios for each source-destination pair
path_ratios = m.addVars(
[
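The change above (and the matching one in `optMC`) fixes a subtle resource bug: the old code opened an anonymous model in the `with` statement and then immediately shadowed `m` with a second `gp.Model(...)`, so the model actually used was never the one tied to the context manager's cleanup. A minimal sketch of the corrected pattern, with a placeholder variable and constraint (requires a Gurobi license to run):

```python
import gurobipy as gp

with gp.Env() as env, gp.Model("network_optimization", env=env) as m:
    # The named model is created directly in the `with` statement, so it is
    # the same object that gets disposed when the block exits.
    x = m.addVar(name="x")  # placeholder decision variable
    m.setObjective(x, gp.GRB.MAXIMIZE)
    m.addConstr(x <= 1.0, name="cap")
    m.optimize()

    if m.Status == gp.GRB.OPTIMAL:
        print(f"optimal objective: {m.ObjVal}")
```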
12 changes: 6 additions & 6 deletions p6/main.py → nfopt/main.py
@@ -6,11 +6,11 @@

from multiprocessing import set_start_method

from p6.calc_type_enum import CalcType
from p6.utils import data as dataUtils
from p6.utils import network as nwUtils
from p6.utils import log
from p6.linear_optimization import netflow, optimizer as linOpt
from nfopt.calc_type_enum import CalcType
from nfopt.utils import data as dataUtils
from nfopt.utils import network as nwUtils
from nfopt.utils import log
from nfopt.linear_optimization import netflow, optimizer as linOpt

logger = log.setupCustomLogger(__name__)

@@ -211,7 +211,7 @@ def main():
],
)

if results[0] is not None:
if results is not None:
results.sort()

dataUtils.writeDataToFile(
File renamed without changes.
5 changes: 2 additions & 3 deletions p6/utils/data.py → nfopt/utils/data.py
@@ -3,7 +3,7 @@
import pandas as pd
import multiprocessing as mp

from p6.utils import log
from nfopt.utils import log
from functools import partial
from datetime import datetime
from dotenv import load_dotenv
@@ -134,8 +134,7 @@ def readLinks():

logger.info("Reading links...")
dataCapacity = pd.read_csv(
f"{DATASET_PATH}/{DATASET_LINKS_NAME}.csv.gz",
compression="gzip",
f"{DATASET_PATH}/{DATASET_LINKS_NAME}.csv",
names=["linkStart", "linkEnd", "capacity"],
skiprows=1,
engine="pyarrow",
File renamed without changes.
2 changes: 1 addition & 1 deletion p6/utils/network.py → nfopt/utils/network.py
@@ -1,4 +1,4 @@
from p6.utils import log
from nfopt.utils import log

logger = log.setupCustomLogger(__name__)
