Skip to content

Commit

Permalink
Merge pull request #1175 from lingbai-kong/ndarrayload
Browse files Browse the repository at this point in the history
optimize: time complexity of Imdb dataset loader
  • Loading branch information
Oceania2018 authored Sep 15, 2023
2 parents eb49be0 + 628b2ce commit c814fe1
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 35 deletions.
48 changes: 21 additions & 27 deletions src/TensorFlowNET.Keras/Datasets/Imdb.cs
Original file line number Diff line number Diff line change
Expand Up @@ -116,23 +116,13 @@ public DatasetPass load_data(
for (var i = 0; i < x_train_array.GetLength(0); i++)
{
new_x_train_array[i, 0] = (int)start_char;
for (var j = 0; j < x_train_array.GetLength(1); j++)
{
if (x_train_array[i, j] == 0)
break;
new_x_train_array[i, j + 1] = x_train_array[i, j];
}
Array.Copy(x_train_array, i * x_train_array.GetLength(1), new_x_train_array, i * new_x_train_array.GetLength(1) + 1, x_train_array.GetLength(1));
}
int[,] new_x_test_array = new int[x_test_array.GetLength(0), x_test_array.GetLength(1) + 1];
for (var i = 0; i < x_test_array.GetLength(0); i++)
{
new_x_test_array[i, 0] = (int)start_char;
for (var j = 0; j < x_test_array.GetLength(1); j++)
{
if (x_test_array[i, j] == 0)
break;
new_x_test_array[i, j + 1] = x_test_array[i, j];
}
Array.Copy(x_test_array, i * x_test_array.GetLength(1), new_x_test_array, i * new_x_test_array.GetLength(1) + 1, x_test_array.GetLength(1));
}
x_train_array = new_x_train_array;
x_test_array = new_x_test_array;
Expand Down Expand Up @@ -163,15 +153,19 @@ public DatasetPass load_data(
{
maxlen = max(x_train_array.GetLength(1), x_test_array.GetLength(1));
}
(x_train, labels_train) = data_utils._remove_long_seq((int)maxlen, x_train_array, labels_train_array);
(x_test, labels_test) = data_utils._remove_long_seq((int)maxlen, x_test_array, labels_test_array);
if (x_train.size == 0 || x_test.size == 0)
(x_train_array, labels_train_array) = data_utils._remove_long_seq((int)maxlen, x_train_array, labels_train_array);
(x_test_array, labels_test_array) = data_utils._remove_long_seq((int)maxlen, x_test_array, labels_test_array);
if (x_train_array.Length == 0 || x_test_array.Length == 0)
throw new ValueError("After filtering for sequences shorter than maxlen=" +
$"{maxlen}, no sequence was kept. Increase maxlen.");

var xs = np.concatenate(new[] { x_train, x_test });
var labels = np.concatenate(new[] { labels_train, labels_test });
var xs_array = (int[,])xs.ToMultiDimArray<int>();
int[,] xs_array = new int[x_train_array.GetLength(0) + x_test_array.GetLength(0), (int)maxlen];
Array.Copy(x_train_array, xs_array, x_train_array.Length);
Array.Copy(x_test_array, 0, xs_array, x_train_array.Length, x_train_array.Length);

long[] labels_array = new long[labels_train_array.Length + labels_test_array.Length];
Array.Copy(labels_train_array, labels_array, labels_train_array.Length);
Array.Copy(labels_test_array, 0, labels_array, labels_train_array.Length, labels_test_array.Length);

if (num_words == null)
{
Expand All @@ -197,7 +191,7 @@ public DatasetPass load_data(
new_xs_array[i, j] = (int)oov_char;
}
}
xs = new NDArray(new_xs_array);
xs_array = new_xs_array;
}
else
{
Expand All @@ -211,19 +205,19 @@ public DatasetPass load_data(
new_xs_array[i, k++] = xs_array[i, j];
}
}
xs = new NDArray(new_xs_array);
xs_array = new_xs_array;
}

var idx = len(x_train);
x_train = xs[$"0:{idx}"];
x_test = xs[$"{idx}:"];
var y_train = labels[$"0:{idx}"];
var y_test = labels[$"{idx}:"];
Array.Copy(xs_array, x_train_array, x_train_array.Length);
Array.Copy(xs_array, x_train_array.Length, x_test_array, 0, x_train_array.Length);

Array.Copy(labels_array, labels_train_array, labels_train_array.Length);
Array.Copy(labels_array, labels_train_array.Length, labels_test_array, 0, labels_test_array.Length);

return new DatasetPass
{
Train = (x_train, y_train),
Test = (x_test, y_test)
Train = (x_train_array, labels_train_array),
Test = (x_test_array, labels_test_array)
};
}

Expand Down
14 changes: 6 additions & 8 deletions src/TensorFlowNET.Keras/Utils/data_utils.cs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ public static string get_file(string fname, string origin,
return datadir;
}

public static (NDArray, NDArray) _remove_long_seq(int maxlen, NDArray seq, NDArray label)
public static (int[,], long[]) _remove_long_seq(int maxlen, int[,] seq, long[] label)
{
/*Removes sequences that exceed the maximum length.
Expand All @@ -56,19 +56,17 @@ public static (NDArray, NDArray) _remove_long_seq(int maxlen, NDArray seq, NDArr
List<int[]> new_seq = new List<int[]>();
List<long> new_label = new List<long>();

var seq_array = (int[,])seq.ToMultiDimArray<int>();
var label_array = (long[])label.ToArray<long>();
for (var i = 0; i < seq_array.GetLength(0); i++)
for (var i = 0; i < seq.GetLength(0); i++)
{
if (maxlen < seq_array.GetLength(1) && seq_array[i,maxlen] != 0)
if (maxlen < seq.GetLength(1) && seq[i, maxlen] != 0)
continue;
int[] sentence = new int[maxlen];
for (var j = 0; j < maxlen && j < seq_array.GetLength(1); j++)
for (var j = 0; j < maxlen && j < seq.GetLength(1); j++)
{
sentence[j] = seq_array[i, j];
sentence[j] = seq[i, j];
}
new_seq.Add(sentence);
new_label.Add(label_array[i]);
new_label.Add(label[i]);
}

int[,] new_seq_array = new int[new_seq.Count, maxlen];
Expand Down

0 comments on commit c814fe1

Please sign in to comment.