Exception encountered when calling layer "dense_features_5"

So I had accidentally removed this line from the df_to_dataset function:

  def df_to_dataset(df, shuffle=True, batch_size=32): 
    df = df.copy()
    labels = df.pop('class')
    dicts = {'STAR': 1, 'GALAXY': 2, 'QSO': 3}
    converted_labels = np.array([dicts[l] for l in labels])
    ds = tf.data.Dataset.from_tensor_slices((dict(df), converted_labels))
    if shuffle:
      ds = ds.shuffle(buffer_size=len(df))
    ds = ds.batch(batch_size)     # this one (kept outside the if, so the unshuffled val/test sets get batched too)
    return ds

Now everything works fine :)
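
In case it helps someone else, here is a minimal sketch of why the missing batch() call produces the "cannot have rank 0" error (the toy DataFrame below is made up, not the real SDSS data): from_tensor_slices yields one scalar per feature when the dataset is unbatched, and DenseFeatures needs at least rank-1 tensors.

  import numpy as np
  import pandas as pd
  import tensorflow as tf

  # toy stand-in for the real DataFrame
  toy = pd.DataFrame({'camcol': np.arange(6, dtype='int64'),
                      'class': ['STAR', 'GALAXY', 'QSO'] * 2})
  labels = toy.pop('class').map({'STAR': 1, 'GALAXY': 2, 'QSO': 3}).values
  ds = tf.data.Dataset.from_tensor_slices((dict(toy), labels))

  print(ds.element_spec[0]['camcol'])           # shape=()      -> rank 0, DenseFeatures rejects this
  print(ds.batch(4).element_spec[0]['camcol'])  # shape=(None,) -> rank 1, works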

UPDATE: you can pass all of your data at once rather than in mini-batches, but TensorFlow still expects the input to arrive as batches, so the .batch() call is needed either way.
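
If you do go the "all data at once" route, one sketch (reusing the fixed df_to_dataset and the train/val DataFrames from the question) is to make each DataFrame a single batch, which still gives DenseFeatures rank-1 feature tensors:

  train_ds = df_to_dataset(train, shuffle=True, batch_size=len(train))
  val_ds = df_to_dataset(val, shuffle=False, batch_size=len(val))
  model.fit(train_ds, validation_data=val_ds, epochs=10)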

Author: Elizabeth Grant

Updated on June 04, 2022

Comments

  • Elizabeth Grant almost 2 years

    I have a multi-class classification problem (3 classes), so there are 3 neurons in the output layer, and all columns are numeric. I'm getting an error I can't understand. Here's my code:

    import numpy as np
    import tensorflow as tf
    from tensorflow import feature_column
    from tensorflow.keras.layers import Dense, DenseFeatures
    from tensorflow.keras.models import Sequential

    def df_to_dataset(df, shuffle=True, batch_size=32):
      df = df.copy()
      labels = df.pop('class')
      dicts = {'STAR': 1, 'GALAXY': 2, 'QSO': 3}
      converted_labels = np.array([dicts[l] for l in labels])
      ds = tf.data.Dataset.from_tensor_slices((dict(df), converted_labels))
      if shuffle:
        ds = ds.shuffle(buffer_size=len(df))
      return ds
    
    batch_size = 32
    train_ds = df_to_dataset(train, batch_size=batch_size)
    val_ds = df_to_dataset(val, shuffle=False, batch_size=batch_size)
    test_ds = df_to_dataset(test, shuffle=False, batch_size=batch_size)
    
    feature_columns = []
    for numeric_col in ['objid', 'ra', 'dec', 'u', 'g', 'r', 'i', 'z', 'run', 'rerun', 'camcol', 'field', 'specobjid', 'redshift', 'plate', 'mjd', 'fiberid']:
      feature_columns.append(feature_column.numeric_column(numeric_col))
    
    feature_layer = DenseFeatures(feature_columns) # A layer that produces a dense Tensor
    model = Sequential([
      feature_layer,
      Dense(32, activation='relu'),
      Dense(3, activation='softmax')
    ])
    
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',
                  metrics=['accuracy'])
    
    model.fit(train_ds,
              validation_data=val_ds,
              epochs=10)
    

    And here's the error I get:

    ValueError: in user code:

    File "/usr/local/lib/python3.7/dist-packages/keras/engine/training.py", line 878, in train_function  *
        return step_function(self, iterator)
    File "/usr/local/lib/python3.7/dist-packages/keras/engine/training.py", line 867, in step_function  **
        outputs = model.distribute_strategy.run(run_step, args=(data,))
    File "/usr/local/lib/python3.7/dist-packages/keras/engine/training.py", line 860, in run_step  **
        outputs = model.train_step(data)
    File "/usr/local/lib/python3.7/dist-packages/keras/engine/training.py", line 808, in train_step
        y_pred = self(x, training=True)
    File "/usr/local/lib/python3.7/dist-packages/keras/utils/traceback_utils.py", line 67, in error_handler
        raise e.with_traceback(filtered_tb) from None
    
    ValueError: Exception encountered when calling layer "dense_features_5" (type DenseFeatures).
    
    Feature (key: camcol) cannot have rank 0. Given: Tensor("IteratorGetNext:0", shape=(), dtype=int64)
    

    Please tell me what the problem could be?