
This is an implementation of a graph neural network (GNN) in TensorFlow/Keras, doing node classification on a small example graph.

Edges are represented by an edge list.

### GNN taking edge list as an input

```
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
import networkx as nx
import matplotlib.pyplot as plt

# Sample data
nodes_data = {
    'node_id': [1, 2, 3, 4, 5],
    'age': [25, 30, 22, 28, 35],
    'gender': [0, 1, 0, 1, 0],        # 0: Male, 1: Female
    'occupation': [0, 1, 2, 3, 4]     # Encoded occupations
}
edges_data = {
    'edge_id': [1, 2, 3, 4, 5],
    'source': [1, 2, 3, 4, 1],
    'target': [2, 3, 4, 5, 3],
    'relationship_strength': [0.8, 0.6, 0.7, 0.9, 0.5]
}

nodes_df = pd.DataFrame(nodes_data)
edges_df = pd.DataFrame(edges_data)

# Convert to zero-based indexing
nodes_df['node_id'] = nodes_df['node_id'] - 1
edges_df['source'] = edges_df['source'] - 1
edges_df['target'] = edges_df['target'] - 1

# Feature and target preparation
features = nodes_df[['age', 'gender', 'occupation']].to_numpy()
num_features = features.shape[1]
num_nodes = features.shape[0]

# Assume some target labels for demonstration
target_labels = np.array([0, 1, 0, 1, 0])  # Example target labels (e.g., binary classification)

# Graph construction
graph = nx.from_pandas_edgelist(edges_df, source='source', target='target')
adj_matrix = nx.adjacency_matrix(graph).todense()


class GraphConvLayer(layers.Layer):
    def __init__(self, output_dim, **kwargs):
        super(GraphConvLayer, self).__init__(**kwargs)
        self.output_dim = output_dim

    def build(self, input_shape):
        print("build() input_shape:", input_shape)
        self.kernel = self.add_weight(
            shape=(input_shape[0][-1], self.output_dim),
            initializer='glorot_uniform',
            name='kernel'
        )

    def call(self, inputs):
        features, adj_matrix = inputs
        adj_matrix_dense = tf.convert_to_tensor(adj_matrix, dtype=tf.float32)  # Ensure adj_matrix is dense
        output = tf.matmul(adj_matrix_dense, features)
        output = tf.matmul(output, self.kernel)
        return output


def create_gnn_model(input_shape, output_dim, num_nodes):
    features_input = keras.Input(shape=(num_nodes, input_shape), name='features')
    adj_matrix_input = keras.Input(shape=(num_nodes, num_nodes), name='adj_matrix')
    x = GraphConvLayer(16)([features_input, adj_matrix_input])
    x = layers.ReLU()(x)
    x = GraphConvLayer(output_dim)([x, adj_matrix_input])
    return keras.Model(inputs=[features_input, adj_matrix_input], outputs=x)


gnn_model = create_gnn_model(num_features, 2, num_nodes)
gnn_model.compile(
    optimizer=keras.optimizers.Adam(learning_rate=0.01),
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    metrics=[keras.metrics.SparseCategoricalAccuracy(name='acc')]
)

# Data preparation for training
adj_matrix_input = np.array(adj_matrix).astype(np.float32)
features_input = features.astype(np.float32)

# Expand dimensions to match the input shape (batch size, num_nodes, num_features)
features_input = np.expand_dims(features_input, axis=0)
adj_matrix_input = np.expand_dims(adj_matrix_input, axis=0)
target_labels = np.expand_dims(target_labels, axis=0)  # Expand dimensions of target_labels to match the batch size

print("features_input shape:", features_input.shape)
print("adj_matrix_input shape:", adj_matrix_input.shape)
print("target_labels shape:", target_labels.shape)

# Train the model
history = gnn_model.fit(
    x=[features_input, adj_matrix_input],
    y=target_labels,
    epochs=100,
    batch_size=1,
    validation_split=0  # Set validation_split to 0
)

plt.plot(history.history['loss'])
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.show()

plt.plot(history.history['acc'])
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.show()
```
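For context on the edge-list representation, here is a minimal sketch, assuming the same zero-based `source`/`target` columns and five nodes as above, of what the `nx.from_pandas_edgelist` / `nx.adjacency_matrix(...).todense()` step amounts to; this is an illustration, not part of the model code.

```
import numpy as np
import pandas as pd

# Hedged illustration only: the zero-based edge list from the post above.
edges_df = pd.DataFrame({'source': [0, 1, 2, 3, 0],
                         'target': [1, 2, 3, 4, 2]})
num_nodes = 5

# Dense, symmetric (undirected) adjacency matrix. networkx orders nodes by
# insertion order, which for this edge list happens to coincide with 0..4,
# so the result matches nx.adjacency_matrix(...).todense() here.
adj = np.zeros((num_nodes, num_nodes), dtype=np.float32)
adj[edges_df['source'], edges_df['target']] = 1.0
adj[edges_df['target'], edges_df['source']] = 1.0

print(adj)
```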

1 Answer


This is substantially the same code as this morning's earlier submission.

It adopts none of the recommended changes. Node adjacency is represented in a slightly different way, yet none of the common logic has been factored out and reused. We see no """docstrings""" explaining the caller's responsibilities and the expected output. If there is some improvement in this version 2, I am not yet seeing it.
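To make the docstring point concrete, here is a minimal sketch (my wording, not the original poster's) of the kind of documentation being asked for on `GraphConvLayer`: it states the caller's responsibilities and the expected output. The shape and behaviour claims are taken from the posted code.

```
import tensorflow as tf
from tensorflow.keras import layers


class GraphConvLayer(layers.Layer):
    """Single graph-convolution layer computing adj_matrix @ features @ kernel.

    Callers are responsible for passing `inputs` to `call` as a two-element
    list `[features, adj_matrix]` with shapes (batch, num_nodes, num_features)
    and (batch, num_nodes, num_nodes). The layer returns a float32 tensor of
    shape (batch, num_nodes, output_dim); no activation is applied.
    """

    def __init__(self, output_dim, **kwargs):
        super().__init__(**kwargs)
        self.output_dim = output_dim

    def build(self, input_shape):
        # input_shape is a list: [features shape, adj_matrix shape].
        self.kernel = self.add_weight(
            shape=(input_shape[0][-1], self.output_dim),
            initializer='glorot_uniform',
            name='kernel',
        )

    def call(self, inputs):
        features, adj_matrix = inputs
        adj_matrix = tf.cast(adj_matrix, tf.float32)
        return tf.matmul(tf.matmul(adj_matrix, features), self.kernel)
```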

