microsoft / CNTK

Microsoft Cognitive Toolkit (CNTK), an open source deep-learning toolkit
https://docs.microsoft.com/cognitive-toolkit/

RegisterConfigurableRuntimeType: Attempted to register type 'ComputationNode' twice #2828

Open cmciris opened 6 years ago

cmciris commented 6 years ago

terminate called after throwing an instance of 'Microsoft::MSR::CNTK::ExceptionWithCallStack'
  what():  RegisterConfigurableRuntimeType: Attempted to register type 'ComputationNode' twice.

What's wrong with it?

eldakms commented 6 years ago

Could you please provide more context on when this error appears? I have seen this happen when you try to load two libraries that both contain the ComputationNode statics.
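
One way to check for that situation on Linux (this is only an illustrative diagnostic sketch, not code from this issue) is to dump every shared object mapped into the process early at startup and look for the CNTK core library appearing more than once, e.g. via glibc's `dl_iterate_phdr`:

```cpp
// Diagnostic sketch: list every shared object mapped into the process.
// If more than one library containing the CNTK ComputationNode statics
// (e.g. Cntk.Core plus a second copy pulled in by another .so) shows up,
// the duplicate-registration error above is the expected symptom.
#include <link.h>
#include <cstdio>

static int PrintSharedObject(struct dl_phdr_info* info, size_t /*size*/, void* /*data*/) {
  if (info->dlpi_name && info->dlpi_name[0] != '\0')
    std::printf("loaded: %s\n", info->dlpi_name);
  return 0;  // continue iterating over all loaded objects
}

int main() {
  // Call this early (before any CNTK model is loaded) and grep the
  // output for "Cntk" to see whether the core library is mapped twice.
  dl_iterate_phdr(PrintSharedObject, nullptr);
  return 0;
}
```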

cmciris commented 6 years ago

This error happens at the beginning of my program, maybe during static initialization? This is where I use CNTK:

```cpp
static CNTK::FunctionPtr nn_policy = nullptr;
static CNTK::FunctionPtr nn_value = nullptr;

static CNTK::DeviceDescriptor GetDevice() {
  if (device_id == -1)
    return CNTK::DeviceDescriptor::CPUDevice();
  if (device_id == -2)
    return CNTK::DeviceDescriptor::UseDefaultDevice();
  return CNTK::DeviceDescriptor::GPUDevice(device_id);
}

void ReadWeights() {
  wchar_t name[1024];
  mbstate_t ps;
  memset(&ps, 0, sizeof(ps));
  const char *src = uct_params_path;
  mbsrtowcs(name, &src, 1024, &ps);
  wstring path = name;

cerr << "Init CNTK" << endl;

CNTK::DeviceDescriptor device = GetDevice();

wstring policy_name = path; policy_name += L"/model2.bin"; nn_policy = CNTK::Function::Load(policy_name, device);

wstring value_name = path; value_name += L"/model3.bin"; nn_value = CNTK::Function::Load(value_name, device);

if (!nn_policy || !nn_value) { cerr << "Get EvalModel failed\n"; }

if 0

wcerr << L"POLICY" << endl; for (auto var : nn_policy->Inputs()) { wcerr << var.AsString() << endl; } for (auto var : nn_policy->Outputs()) { wcerr << var.AsString() << endl; } wcerr << L"VALUE" << endl; for (auto var : nn_value->Inputs()) { wcerr << var.AsString() << endl; } for (auto var : nn_value->Outputs()) { wcerr << var.AsString() << endl; }

#endif

  cerr << "ok" << endl;
}

bool GetVariableByName(vector<CNTK::Variable> variableLists, wstring varName, CNTK::Variable& var) {
  for (vector<CNTK::Variable>::iterator it = variableLists.begin(); it != variableLists.end(); ++it) {
    if (it->Name().compare(varName) == 0) {
      var = *it;
      return true;
    }
  }
  return false;
}

inline bool GetInputVariableByName(CNTK::FunctionPtr evalFunc, wstring varName, CNTK::Variable& var) {
  return GetVariableByName(evalFunc->Arguments(), varName, var);
}

inline bool GetOutputVaraiableByName(CNTK::FunctionPtr evalFunc, wstring varName, CNTK::Variable& var) {
  return GetVariableByName(evalFunc->Outputs(), varName, var);
}

void EvalPolicy(const std::vector<std::shared_ptr>& requests, std::vector& data_basic, std::vector& data_features,
                std::vector& data_history, std::vector& data_color, std::vector& data_komi) {
  if (requests.size() == 0)
    return;

  CNTK::Variable var_basic, var_features, var_history, var_color, var_komi;
  GetInputVariableByName(nn_policy, L"basic", var_basic);
  GetInputVariableByName(nn_policy, L"features", var_features);
  GetInputVariableByName(nn_policy, L"history", var_history);
  GetInputVariableByName(nn_policy, L"color", var_color);
  GetInputVariableByName(nn_policy, L"komi", var_komi);

  CNTK::Variable var_ol;
  GetOutputVaraiableByName(nn_policy, L"ol", var_ol);

  size_t num_req = requests.size();

  CNTK::NDShape shape_basic = var_basic.Shape().AppendShape({ 1, num_req });
  CNTK::ValuePtr value_basic = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_basic, data_basic, true));
  CNTK::NDShape shape_features = var_features.Shape().AppendShape({ 1, num_req });
  CNTK::ValuePtr value_features = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_features, data_features, true));
  CNTK::NDShape shape_history = var_history.Shape().AppendShape({ 1, num_req });
  CNTK::ValuePtr value_history = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_history, data_history, true));
  CNTK::NDShape shape_color = var_color.Shape().AppendShape({ 1, num_req });
  CNTK::ValuePtr value_color = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_color, data_color, true));
  //CNTK::NDShape shape_komi = var_komi.Shape().AppendShape({ 1, num_req });
  //CNTK::ValuePtr value_komi = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_komi, data_komi, true));

  CNTK::ValuePtr value_ol;

  //CNTK::DeviceDescriptor device = CNTK::DeviceDescriptor::GPUDevice(device_id);
  CNTK::DeviceDescriptor device = GetDevice();
  std::unordered_map<CNTK::Variable, CNTK::ValuePtr> inputs = {
    { var_basic, value_basic },
    { var_features, value_features },
    { var_history, value_history },
    //{ var_color, value_color },
    //{ var_komi, value_komi },
  };
  std::unordered_map<CNTK::Variable, CNTK::ValuePtr> outputs = { { var_ol, value_ol } };

  try {
    nn_policy->Forward(inputs, outputs, device);
  } catch (const std::exception& err) {
    fprintf(stderr, "Evaluation failed. EXCEPTION occurred: %s\n", err.what());
    abort();
  } catch (...) {
    fprintf(stderr, "Evaluation failed. Unknown ERROR occurred.\n");
    abort();
  }

  value_ol = outputs[var_ol];
  CNTK::NDShape shape_ol = var_ol.Shape().AppendShape({ 1, num_req });
  vector moves(shape_ol.TotalSize());
  CNTK::NDArrayViewPtr cpu_moves = CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_ol, moves, false);
  cpu_moves->CopyFrom(*value_ol->Data());

  if (moves.size() != pure_board_max * num_req) {
    cerr << "Eval move error " << moves.size() << endl;
    return;
  }

  for (int j = 0; j < requests.size(); j++) {
    const auto req = requests[j];
    const int index = req->index;
    const int child_num = uct_node[index].child_num;
    child_node_t *uct_child = uct_node[index].child;
    const int ofs = pure_board_max * j;

    LOCK_NODE(index);

    int depth = req->depth;

#if 0

if (index == current_root) {
  for (int i = 0; i < pure_board_max; i++) {
int x = i % pure_board_size;
int y = i / pure_board_size;
owner_nn[POS(x + OB_SIZE, y + OB_SIZE)] = ownern[i + ofs];
  }
}

#endif

for (int i = 1; i < child_num; i++) {
  int pos = RevTransformMove(uct_child[i].pos, req->trans);

  int x = X(pos) - OB_SIZE;
  int y = Y(pos) - OB_SIZE;
  int n = x + y * pure_board_size;
  double score = moves[n + ofs];
  //if (depth == 1) cerr << "RAW POLICY " << uct_child[i].pos << " " << req->trans << " " << FormatMove(pos) << " " << x << "," << y << " " << ofs << " -> " << score << endl;
  if (uct_child[i].ladder) {
    score -= 4; // ~= 1.83%
  }

  uct_child[i].nnrate0 = score;
}

UpdatePolicyRate(index);
uct_node[index].evaled = true;

UNLOCK_NODE(index);

  }

  eval_count_policy += requests.size();
}

void EvalValue(const std::vector<std::shared_ptr>& requests, std::vector& data_basic, std::vector& data_features,
               std::vector& data_history, std::vector& data_color, std::vector& data_komi, std::vector& data_safety) {
  if (requests.size() == 0)
    return;

  CNTK::Variable var_basic, var_features, var_history, var_color, var_komi, var_safety;
  GetInputVariableByName(nn_value, L"basic", var_basic);
  GetInputVariableByName(nn_value, L"features", var_features);
  GetInputVariableByName(nn_value, L"history", var_history);
  GetInputVariableByName(nn_value, L"color", var_color);
  GetInputVariableByName(nn_value, L"komi", var_komi);
  GetInputVariableByName(nn_value, L"safety", var_safety);

  CNTK::Variable var_p;
  GetOutputVaraiableByName(nn_value, L"p", var_p);

  size_t num_req = requests.size();

  CNTK::NDShape shape_basic = var_basic.Shape().AppendShape({ 1, num_req });
  CNTK::ValuePtr value_basic = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_basic, data_basic, true));
  CNTK::NDShape shape_features = var_features.Shape().AppendShape({ 1, num_req });
  CNTK::ValuePtr value_features = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_features, data_features, true));
  CNTK::NDShape shape_history = var_history.Shape().AppendShape({ 1, num_req });
  CNTK::ValuePtr value_history = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_history, data_history, true));
  CNTK::NDShape shape_color = var_color.Shape().AppendShape({ 1, num_req });
  CNTK::ValuePtr value_color = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_color, data_color, true));
  CNTK::NDShape shape_komi = var_komi.Shape().AppendShape({ 1, num_req });
  CNTK::ValuePtr value_komi = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_komi, data_komi, true));
  CNTK::NDShape shape_safety = var_safety.Shape().AppendShape({ 1, num_req });
  CNTK::ValuePtr value_safety = CNTK::MakeSharedObject<CNTK::Value>(CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_safety, data_safety, true));

  CNTK::ValuePtr value_p;

  //CNTK::DeviceDescriptor device = CNTK::DeviceDescriptor::GPUDevice(device_id);
  CNTK::DeviceDescriptor device = GetDevice();
  std::unordered_map<CNTK::Variable, CNTK::ValuePtr> inputs = {
    { var_basic, value_basic },
    { var_features, value_features },
    { var_history, value_history },
    { var_color, value_color },
    { var_komi, value_komi },
    { var_safety, value_safety },
  };
  std::unordered_map<CNTK::Variable, CNTK::ValuePtr> outputs = { { var_p, value_p } };

  try {
    nn_value->Forward(inputs, outputs, device);
  } catch (const std::exception& err) {
    fprintf(stderr, "Evaluation failed. EXCEPTION occurred: %s\n", err.what());
    abort();
  } catch (...) {
    fprintf(stderr, "Evaluation failed. Unknown ERROR occurred.\n");
    abort();
  }

  value_p = outputs[var_p];
  CNTK::NDShape shape_p = var_p.Shape().AppendShape({ 1, num_req });
  vector win(shape_p.TotalSize());
  CNTK::NDArrayViewPtr cpu_p = CNTK::MakeSharedObject<CNTK::NDArrayView>(shape_p, win, false);
  cpu_p->CopyFrom(*value_p->Data());

  if (win.size() != requests.size()) {
    cerr << "Eval win error " << win.size() << endl;
    return;
  }
  //cerr << "Eval " << indices.size() << " " << path.size() << endl;
  for (int j = 0; j < requests.size(); j++) {
    auto req = requests[j];

double p = ((double)win[j] + 1) / 2;
if (p < 0)
  p = 0;
if (p > 1)
  p = 1;
//cerr << "#" << index << "  " << sum << endl;

double value = 1 - p;// color[j] == S_BLACK ? p : 1 - p;

req->uct_child->value = value;
for (int i = req->path.size() - 1; i >= 0; i--) {
  int current = req->path[i];
  if (current < 0)
break;

  atomic_fetch_add(&uct_node[current].value_move_count, 1);
  atomic_fetch_add(&uct_node[current].value_win, value);
  value = 1 - value;
}

  }

  eval_count_value += requests.size();
}
```

eldakms commented 6 years ago

I do not see anything wrong that could cause this error. Could you please tell me which compiler you are using (exact version) and what OS you are on? Thanks!

cmciris commented 6 years ago

Compiler: gcc version 5.3.0 (GCC)
OS: Ubuntu 16.04 LTS

eldakms commented 6 years ago

Did you compile the Cntk.Core .so yourself? If not, the official CNTK build uses GCC 4.8.3, which is not binary-compatible with 5.3.

cmciris commented 6 years ago

No, I downloaded the binary package and followed the steps here: https://docs.microsoft.com/en-us/cognitive-toolkit/setup-linux-binary-script

cmciris commented 6 years ago

The package is CNTK for Linux v2.1 GPU.

eldakms commented 6 years ago

This won't work. You will need the 4.8 compiler, as described here: https://docs.microsoft.com/en-us/cognitive-toolkit/setup-cntk-on-linux#c-compiler

Or compile everything yourself with 5.3, but I'm not sure whether that is currently supported; I'm not on the CNTK team.

cmciris commented 6 years ago

But it works on another computer, where the only difference is that the g++ version is 5.4.

eldakms commented 6 years ago

Strange, I would expect this to fail on any binary compiled with GCC 5. CNTK exposes C++ at its API boundary, so objects built with incompatible compiler versions cannot be mixed. This has nothing to do with CNTK specifically; any C++ library ships .so files for particular compiler versions (e.g. OpenCV).

Maybe something changed in CNTK and they now provide GCC 5 binaries; I'm not aware of it, as I'm not on the team. I would try compiling your program with 4.8 and see whether you run into the same problem. That should be as easy as installing another GCC version via apt.
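
As a rough sanity check (only an illustrative sketch, not something from the CNTK setup docs), you can print the compiler version and the libstdc++ dual-ABI macro your own translation units are built with, and compare that against the GCC 4.8.3 toolchain the official v2.1 binaries were built with:

```cpp
// Minimal sketch: print the compiler version and the libstdc++ dual-ABI
// setting this translation unit is compiled with. Objects built with the
// GCC 5.x "new" ABI and a library built with GCC 4.8 can disagree here,
// which is the kind of mismatch discussed above.
#include <cstdio>
#include <string>  // pulls in the libstdc++ ABI configuration macros

int main() {
#ifdef __GNUC__
  std::printf("GCC %d.%d.%d\n", __GNUC__, __GNUC_MINOR__, __GNUC_PATCHLEVEL__);
#endif
#ifdef _GLIBCXX_USE_CXX11_ABI
  std::printf("_GLIBCXX_USE_CXX11_ABI = %d\n", _GLIBCXX_USE_CXX11_ABI);
#else
  std::printf("_GLIBCXX_USE_CXX11_ABI not defined (pre-GCC-5 libstdc++ headers)\n");
#endif
  return 0;
}
```

If `_GLIBCXX_USE_CXX11_ABI` reports 1 (the GCC 5 default) while the library was built against the old 4.8 ABI, std::string/std::wstring objects crossing the library boundary will not have the layout the library expects.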

ke1337 commented 6 years ago

Can you try a more recent release and see if the problem still exists?