Skip to content

Commit

Permalink
remove shard_num config
Browse files — browse the repository at this point in the history
  • Loading branch information
seemingwang committed Mar 30, 2021
1 parent 771be00 commit 1a3013a
Show file tree
Hide file tree
Showing 4 changed files with 3 additions and 22 deletions.
1 change: 0 additions & 1 deletion paddle/fluid/distributed/ps.proto
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@ message PSParameter {
optional ServerParameter server_param = 102;
repeated DownpourTrainerParameter trainer_param = 301;
optional FsClientParameter fs_client_param = 501;
optional int32 shard_num = 502;
}

message WorkerParameter {
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/distributed/service/graph_brpc_client.cc
Original file line number Diff line number Diff line change
Expand Up @@ -324,7 +324,7 @@ std::future<int32_t> GraphBrpcClient::pull_graph_list(
return fut;
}
int32_t GraphBrpcClient::initialize() {
set_shard_num(_config.shard_num());
// set_shard_num(_config.shard_num());
BrpcPsClient::initialize();
server_size = get_server_nums();
graph_service = NULL;
Expand Down
3 changes: 1 addition & 2 deletions paddle/fluid/distributed/service/graph_py_service.cc
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@ void GraphPyClient::start_client() {
(paddle::distributed::GraphBrpcClient*)
paddle::distributed::PSClientFactory::create(worker_proto));
worker_ptr->configure(worker_proto, dense_regions, _ps_env, client_id);
worker_ptr->set_shard_num(get_shard_num());
}
void GraphPyServer::start_server() {
std::string ip = server_list[rank];
Expand Down Expand Up @@ -119,7 +120,6 @@ void GraphPyServer::start_server() {
::paddle::distributed::PSParameter GraphPyServer::GetServerProto() {
// Generate server proto desc
::paddle::distributed::PSParameter server_fleet_desc;
server_fleet_desc.set_shard_num(get_shard_num());
::paddle::distributed::ServerParameter* server_proto =
server_fleet_desc.mutable_server_param();
::paddle::distributed::DownpourServerParameter* downpour_server_proto =
Expand Down Expand Up @@ -162,7 +162,6 @@ ::paddle::distributed::PSParameter GraphPyServer::GetServerProto() {

::paddle::distributed::PSParameter GraphPyClient::GetWorkerProto() {
::paddle::distributed::PSParameter worker_fleet_desc;
worker_fleet_desc.set_shard_num(get_shard_num());
::paddle::distributed::WorkerParameter* worker_proto =
worker_fleet_desc.mutable_worker_param();

Expand Down
19 changes: 1 addition & 18 deletions paddle/fluid/distributed/test/graph_node_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -214,11 +214,6 @@ void prepare_file(char file_name[], bool load_edge) {
ofile << x << std::endl;
}
}
// for(int i = 0;i < 10;i++){
// for(int j = 0;j < 10;j++){
// ofile<<i * 127 + j<<"\t"<<i <<"\t"<< 0.5<<std::endl;
// }
//}
ofile.close();
}
void GetDownpourSparseTableProto(
Expand All @@ -235,7 +230,6 @@ void GetDownpourSparseTableProto(
::paddle::distributed::PSParameter GetServerProto() {
// Generate server proto desc
::paddle::distributed::PSParameter server_fleet_desc;
server_fleet_desc.set_shard_num(127);
::paddle::distributed::ServerParameter* server_proto =
server_fleet_desc.mutable_server_param();
::paddle::distributed::DownpourServerParameter* downpour_server_proto =
Expand All @@ -256,7 +250,6 @@ ::paddle::distributed::PSParameter GetServerProto() {

::paddle::distributed::PSParameter GetWorkerProto() {
::paddle::distributed::PSParameter worker_fleet_desc;
worker_fleet_desc.set_shard_num(127);
::paddle::distributed::WorkerParameter* worker_proto =
worker_fleet_desc.mutable_worker_param();

Expand Down Expand Up @@ -344,6 +337,7 @@ void RunClient(
(paddle::distributed::GraphBrpcClient*)
paddle::distributed::PSClientFactory::create(worker_proto));
worker_ptr_->configure(worker_proto, dense_regions, _ps_env, 0);
worker_ptr_->set_shard_num(127);
worker_ptr_->set_local_channel(index);
worker_ptr_->set_local_graph_service(
(paddle::distributed::GraphBrpcService*)service);
Expand Down Expand Up @@ -379,16 +373,6 @@ void RunBrpcPushSparse() {
srand(time(0));
pull_status.wait();
std::vector<std::vector<std::pair<uint64_t, float>>> vs;
// for(int i = 0;i < 100000000;i++){
// std::vector<distributed::GraphNode> nodes;
// pull_status = worker_ptr_->pull_graph_list(0, 0, 0, 1, nodes);
// pull_status.wait();
// pull_status = worker_ptr_->batch_sample(0, std::vector<uint64_t>(1, 37), 4,
// vs);
// pull_status.wait();
// }
// std::vector<std::pair<uint64_t, float>> v;
// pull_status = worker_ptr_->sample(0, 37, 4, v);
testSampleNodes(worker_ptr_);
sleep(5);
testSingleSampleNeighboor(worker_ptr_);
Expand Down Expand Up @@ -560,7 +544,6 @@ void RunBrpcPushSparse() {

void testGraphToBuffer() {
::paddle::distributed::GraphNode s, s1;
// s.add_feature("hhhh");
s.set_feature_size(1);
s.set_feature(0, std::string("hhhh"));
s.set_id(65);
Expand Down

1 comment on commit 1a3013a

@paddle-bot-old
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🕵️ CI failures summary

🔍PR: #31226 Commit ID: 1a3013a contains failed CI.

Please sign in to comment.