Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

🎉 Official STGraph Documentation started #83

Merged
merged 26 commits into from
May 6, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
55d359e
🎉 Official STGraph Documentation started
nithinmanoj10 Oct 9, 2023
e0bdb16
📝 Added the installation guide to docs
nithinmanoj10 Oct 17, 2023
5f9f4ac
🚧 Dataset docs in progress
nithinmanoj10 Oct 17, 2023
c803d81
🚧 Cora Dataset documentation in progress
nithinmanoj10 Oct 18, 2023
2b4c5f6
📝 Added example to CoraDataLoader
nithinmanoj10 Oct 19, 2023
5d6bc16
🔧 Made changes to the installation guide
nithinmanoj10 Dec 24, 2023
dec1e48
📝 Changelogs page added
nithinmanoj10 Dec 25, 2023
e3b3235
🚧 GCN tutorial in progress
nithinmanoj10 Dec 25, 2023
77ff275
Merge pull request #93 from bfGraph/v1.1.0
nithinmanoj10 Jan 7, 2024
3b78ae9
➕ Added dataset/__init__.py back
nithinmanoj10 Jan 7, 2024
d4a8fa3
📝 Added all the datasets pydocs
nithinmanoj10 Jan 8, 2024
b6391c0
📝 Added references to dataset docstrings
nithinmanoj10 Jan 10, 2024
cea0795
➕ Added node and edge attributes for Dynamic datasets
nithinmanoj10 Jan 15, 2024
a747678
📝 Build graphs section added to Docs
nithinmanoj10 Jan 15, 2024
bd09b04
📝 Started docs for stgraph.graph
nithinmanoj10 Jan 16, 2024
ecb91ab
🔥 Removed all files in stgraph.dataset
nithinmanoj10 Jan 28, 2024
3b26e29
➕ Create __init__.py in stgraph.dataset module
nithinmanoj10 Jan 28, 2024
f546494
➕ Create __init__.py for dataset modules
nithinmanoj10 Jan 28, 2024
1f3324f
Merge pull request #104 from bfGraph/v1.1.0
nithinmanoj10 Jan 28, 2024
2f4fc6d
Merge pull request #107 from bfGraph/v1.1.0
nithinmanoj10 Jan 28, 2024
5ce2132
Merge pull request #109 from bfGraph/v1.1.0
nithinmanoj10 Jan 28, 2024
49008fe
🔨 Updated train.py to resolve merge conflict
nithinmanoj10 May 5, 2024
c7d696d
🔥 Deleted train.py to fix merge conflict
nithinmanoj10 May 5, 2024
5fca3b7
🔨 Created train.py to fix merge conflict
nithinmanoj10 May 5, 2024
f0f44bd
🔁 Merge pull request #120 from bfGraph/develop
nithinmanoj10 May 5, 2024
7c1b28c
📝 Added docstrings for StaticGraph
nithinmanoj10 May 5, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,10 @@ benchmarking/results/
*.egg-info/
stgraph/build/
stgraph/dist/
dev-stgraph/
docs/_build
docs_old/
docs_v2/
__pycache__/
.DS_Store
build
Expand Down
12 changes: 12 additions & 0 deletions .idea/STGraph.iml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 6 additions & 0 deletions .idea/inspectionProfiles/profiles_settings.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 8 additions & 0 deletions .idea/modules.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 6 additions & 0 deletions .idea/vcs.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

50 changes: 50 additions & 0 deletions .idea/workspace.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

148 changes: 86 additions & 62 deletions benchmarking/gat/seastar/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ def train(args):
cora = CoraDataLoader(verbose=True)

# To account for the initial CUDA Context object for pynvml
tmp = StaticGraph([(0,0)], [1], 1)
tmp = StaticGraph([(0, 0)], [1], 1)

features = torch.FloatTensor(cora.get_all_features())
labels = torch.LongTensor(cora.get_all_targets())
train_mask = cora.get_train_mask()
Expand All @@ -49,15 +49,15 @@ def train(args):

assert train_mask.shape[0] == num_nodes

print('dataset {}'.format("Cora"))
print('# of edges : {}'.format(num_edges))
print('# of nodes : {}'.format(num_nodes))
print('# of features : {}'.format(num_feats))
print("dataset {}".format("Cora"))
print("# of edges : {}".format(num_edges))
print("# of nodes : {}".format(num_nodes))
print("# of features : {}".format(num_feats))

features = torch.FloatTensor(features)
labels = torch.LongTensor(labels)

if hasattr(torch, 'BoolTensor'):
if hasattr(torch, "BoolTensor"):
train_mask = torch.BoolTensor(train_mask)

else:
Expand All @@ -74,17 +74,19 @@ def train(args):

# create model
heads = ([args.num_heads] * args.num_layers) + [args.num_out_heads]
model = GAT(g,
args.num_layers,
num_feats,
args.num_hidden,
n_classes,
heads,
F.elu,
args.in_drop,
args.attn_drop,
args.negative_slope,
args.residual)
model = GAT(
g,
args.num_layers,
num_feats,
args.num_hidden,
n_classes,
heads,
F.elu,
args.in_drop,
args.attn_drop,
args.negative_slope,
args.residual,
)
print(model)
if args.early_stop:
stopper = EarlyStopping(patience=100)
Expand All @@ -94,7 +96,8 @@ def train(args):

# use optimizer
optimizer = torch.optim.Adam(
model.parameters(), lr=args.lr, weight_decay=args.weight_decay)
model.parameters(), lr=args.lr, weight_decay=args.weight_decay
)

# initialize graph
dur = []
Expand All @@ -103,8 +106,8 @@ def train(args):
Used_memory = 0

for epoch in range(args.num_epochs):
#print('epoch = ', epoch)
#print('mem0 = {}'.format(mem0))
# print('epoch = ', epoch)
# print('mem0 = {}'.format(mem0))
torch.cuda.synchronize()
tf = time.time()
model.train()
Expand All @@ -120,7 +123,7 @@ def train(args):
torch.cuda.synchronize()
loss.backward()
optimizer.step()
t2 =time.time()
t2 = time.time()
run_time_this_epoch = t2 - tf

if epoch >= 3:
Expand All @@ -131,56 +134,77 @@ def train(args):

train_acc = accuracy(logits[train_mask], labels[train_mask])

#log for each step
print('Epoch {:05d} | Time(s) {:.4f} | train_acc {:.6f} | Used_Memory {:.6f} mb'.format(
epoch, run_time_this_epoch, train_acc, (now_mem * 1.0 / (1024**2))
))
# log for each step
print(
"Epoch {:05d} | Time(s) {:.4f} | train_acc {:.6f} | Used_Memory {:.6f} mb".format(
epoch, run_time_this_epoch, train_acc, (now_mem * 1.0 / (1024**2))
)
)

if args.early_stop:
model.load_state_dict(torch.load('es_checkpoint.pt'))
model.load_state_dict(torch.load("es_checkpoint.pt"))

#OUTPUT we need
avg_run_time = avg_run_time *1. / record_time
Used_memory /= (1024**3)
print('^^^{:6f}^^^{:6f}'.format(Used_memory, avg_run_time))
# OUTPUT we need
avg_run_time = avg_run_time * 1.0 / record_time
Used_memory /= 1024**3
print("^^^{:6f}^^^{:6f}".format(Used_memory, avg_run_time))

if __name__ == '__main__':

parser = argparse.ArgumentParser(description='GAT')
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="GAT")

# COMMENT IF SNOOP IS TO BE ENABLED
snoop.install(enabled=False)

parser.add_argument("--gpu", type=int, default=0,
help="which GPU to use. Set -1 to use CPU.")
parser.add_argument("--num_epochs", type=int, default=200,
help="number of training epochs")
parser.add_argument("--num_heads", type=int, default=8,
help="number of hidden attention heads")
parser.add_argument("--num_out_heads", type=int, default=1,
help="number of output attention heads")
parser.add_argument("--num_layers", type=int, default=1,
help="number of hidden layers")
parser.add_argument("--num_hidden", type=int, default=32,
help="number of hidden units")
parser.add_argument("--residual", action="store_true", default=False,
help="use residual connection")
parser.add_argument("--in_drop", type=float, default=.6,
help="input feature dropout")
parser.add_argument("--attn_drop", type=float, default=.6,
help="attention dropout")
parser.add_argument("--lr", type=float, default=0.005,
help="learning rate")
parser.add_argument('--weight_decay', type=float, default=5e-4,
help="weight decay")
parser.add_argument('--negative_slope', type=float, default=0.2,
help="the negative slope of leaky relu")
parser.add_argument('--early_stop', action='store_true', default=False,
help="indicates whether to use early stop or not")
parser.add_argument('--fastmode', action="store_true", default=False,
help="skip re-evaluate the validation set")
parser.add_argument(
"--gpu", type=int, default=0, help="which GPU to use. Set -1 to use CPU."
)
parser.add_argument(
"--num_epochs", type=int, default=200, help="number of training epochs"
)
parser.add_argument(
"--num_heads", type=int, default=8, help="number of hidden attention heads"
)
parser.add_argument(
"--num_out_heads", type=int, default=1, help="number of output attention heads"
)
parser.add_argument(
"--num_layers", type=int, default=1, help="number of hidden layers"
)
parser.add_argument(
"--num_hidden", type=int, default=32, help="number of hidden units"
)
parser.add_argument(
"--residual", action="store_true", default=False, help="use residual connection"
)
parser.add_argument(
"--in_drop", type=float, default=0.6, help="input feature dropout"
)
parser.add_argument(
"--attn_drop", type=float, default=0.6, help="attention dropout"
)
parser.add_argument("--lr", type=float, default=0.005, help="learning rate")
parser.add_argument("--weight_decay", type=float, default=5e-4, help="weight decay")
parser.add_argument(
"--negative_slope",
type=float,
default=0.2,
help="the negative slope of leaky relu",
)
parser.add_argument(
"--early_stop",
action="store_true",
default=False,
help="indicates whether to use early stop or not",
)
parser.add_argument(
"--fastmode",
action="store_true",
default=False,
help="skip re-evaluate the validation set",
)
args = parser.parse_args()

print(args)

train(args)
4 changes: 2 additions & 2 deletions docs/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
SOURCEDIR = source
BUILDDIR = build

# Put it first so that "make" without argument is like "make help".
help:
Expand Down
Binary file removed docs/_static/Seastar_docs_logo.png
Binary file not shown.
16 changes: 0 additions & 16 deletions docs/_static/custom.css

This file was deleted.

2 changes: 0 additions & 2 deletions docs/api_references/seastar_datasets.rst

This file was deleted.

2 changes: 0 additions & 2 deletions docs/api_references/seastar_gnn.rst

This file was deleted.

2 changes: 0 additions & 2 deletions docs/api_references/seastar_temporal_gnn.rst

This file was deleted.

Loading
Loading