TensorFlow XLA
RPC
TensorFlow r1.9
2018.06.23/06.30/07.14
@Vengineer
TensorFlow XLAのロゴ
https://github.com/tensorflow/tensorflow/tree/master/tensorflow/compiler/xla
ブログ (2007年~) : Vengineerの戯言
 http://blogs.yahoo.co.jp/verification_engineer
SlideShare :
 https://www.slideshare.net/ssuser479fa3
Twitter (2009年~) :
@Vengineer
ソースコード解析職人
local_client
local_service backend
・CPU
・GPU
・Interpreter
・(Accelerator)
Service
Client
TensorFlow XLAの内部構成
compiler/xla/client/local_client.cc
// Compiles `computation` into a LocalExecutable for in-process execution.
// `argument_layouts` gives the expected layout for each argument shape;
// `options` controls the build (notably the target device ordinal).
StatusOr<std::unique_ptr<LocalExecutable>> LocalClient::Compile(
const XlaComputation& computation,
const tensorflow::gtl::ArraySlice<const Shape*> argument_layouts,
const ExecutableBuildOptions& options) {
ExecutableBuildOptions updated_options = options;
// A device ordinal of -1 means "unspecified": substitute the client's
// default device ordinal before compiling.
if (options.device_ordinal() == -1) {
updated_options.set_device_ordinal(default_device_ordinal());
VLOG(3) << "Set device ordinal to default value of: "
<< updated_options.device_ordinal();
}
// The actual compilation happens on the service side.
TF_ASSIGN_OR_RETURN(std::unique_ptr<Executable> executable,
local_service_->CompileExecutable(
computation, argument_layouts, updated_options));
// Wrap the service-produced Executable so the caller can run it locally.
return WrapUnique(new LocalExecutable(std::move(executable),
local_service_->mutable_backend(),
updated_options));
}
local_service の CompileExecutable で
サービス側で computation をコンパイルして、
Executable にして返す。
compiler/xla/service/local_service.cc
// Service-side compilation: builds an Executable for `computation` on the
// stream executor selected by build_options.device_ordinal().
// (Slide excerpt -- the "...." line marks code elided on the slide.)
StatusOr<std::unique_ptr<Executable>> LocalService:: CompileExecutable(
const XlaComputation& computation,
const tensorflow::gtl::ArraySlice<const Shape*> argument_layouts,
const ExecutableBuildOptions& build_options) {
const HloModuleProto& proto = computation.proto();
....
// Look up the StreamExecutor for the requested device ordinal.
TF_ASSIGN_OR_RETURN(
se::StreamExecutor * executor,
execute_backend_->stream_executor(build_options.device_ordinal()));
// Delegate to Service::BuildExecutable to produce the Executable.
return BuildExecutable(proto, std::move(module_config),
execute_backend_.get(), executor,
build_options.device_allocator());
}
CompileExecutable では、
Serviceの BuildExecutableにて、
Executableを生成する。
compiler/xla/service/service.cc
// Compiles a batch of HLO module protos into Executables using the
// backend's compiler.
// (Slide excerpt -- the "...." lines mark code elided on the slide.)
StatusOr<std::vector<std::unique_ptr<Executable>>> Service::BuildExecutables(
const std::vector<const HloModuleProto*>& module_protos,
std::vector<std::unique_ptr<HloModuleConfig>> module_configs,
Backend* backend, std::vector<std::vector<se::StreamExecutor*>> executors,
DeviceMemoryAllocator* device_allocator) {
....
// The backend's compiler lowers the HLO modules to target executables.
TF_ASSIGN_OR_RETURN(
std::vector<std::unique_ptr<Executable>> executables,
backend->compiler()->Compile(std::move(modules),
std::move(executors),
device_allocator));
....
return std::move(executables);
}
BuildExecutable では、
backendのcompilerにて、Compileし、
Executableを生成する。
XLAグラフに変換
最適化、その1
ターゲットハードウェアの
実行オブジェクト
ターゲットハードウェアに
依存しない最適化
HLO (High Level Optimizer)
XLAグラフ
最適化、その2
コード生成
ターゲットハードウェアに
依存する最適化
LLO (Low Level Optimizer)
TensorFlow Graph
実行オブジェクト
XLAグラフ
LLVM Compiler::Compile (r1.5~)
RunHloPass
RunBackend
TensorFlow XLA RPC
tensorflow/compiler/xla/rpc
BUILD
grpc_service.h
grpc_service.cc
grpc_service_main.cc
grpc_client_test.cc
grpc_stub.h
grpc_stub.cc
xla_service.proto
grpcを利用して、
サーバー・クライアントを構成
 xla_service.proto => プロトコル
 grpc_service.{h,cc} => サービス
 grpc_service_main.cc => サーバー
 grpc_client_test.cc => クライアント
 (grpc_stub.{h,cc})
GRPCStub
GRPCService Service
・CPU
・GPU
・Interpreter
・Accelerator
Service
Client
テストプログラム
class GRPCClientTestBase : public ::testing::Test {
protected:
GRPCClientTestBase() {
string test_srcdir = tensorflow::testing::TensorFlowSrcRoot();
string service_main_path = tensorflow::io::JoinPath(
test_srcdir, "compiler/xla/rpc/grpc_service_main_cpu");
int port = tensorflow::internal::PickUnusedPortOrDie();
subprocess_.SetProgram(
service_main_path,
{service_main_path,
tensorflow::strings::Printf("--port=%d", port)});
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.cc
subprocess_.SetChannelAction(
tensorflow::CHAN_STDOUT,
tensorflow::ACTION_DUPPARENT);
subprocess_.SetChannelAction(
tensorflow::CHAN_STDERR,
tensorflow::ACTION_DUPPARENT);
CHECK(subprocess_.Start());
LOG(INFO) << "Launched subprocess";
子プロセスとして、
 を起動!
grpc_service_main
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.cc
CPU XLA版が動作する
auto channel =
::grpc::CreateChannel(tensorflow::strings::Printf("localhost:%d",
port),
::grpc::InsecureChannelCredentials());
channel->WaitForConnected(gpr_time_add(
gpr_now(GPR_CLOCK_REALTIME),
gpr_time_from_seconds(10, GPR_TIMESPAN)));
LOG(INFO) << "Channel to server is connected on port " << port;
xla_service_ = grpc::XlaService::NewStub(channel);
stub_.reset(new GRPCStub(xla_service_.get()));
client_.reset(new Client(stub_.get()));
}
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.cc
クライアント
参考:gRPC C++ Hello World Tutorial
サービス部分
// gRPC service implementation: handles the unary SayHello RPC by
// replying "Hello <name>" to the caller.
class GreeterServiceImpl final : public Greeter::Service {
Status SayHello(ServerContext* context, const HelloRequest* request,
HelloReply* reply) override {
std::string prefix("Hello ");
// Build the reply message from the request's `name` field.
reply->set_message(prefix + request->name());
return Status::OK;
}
};
https://github.com/grpc/grpc/tree/master/examples/cpp/helloworld
参考:gRPC C++ Hello World Tutorial
サーバー部分
// Brings up a synchronous gRPC server on 0.0.0.0:50051 hosting the
// Greeter service, then blocks in Wait() until the server shuts down.
void RunServer() {
std::string server_address("0.0.0.0:50051");
GreeterServiceImpl service;
ServerBuilder builder;
// Listen on the given address without any authentication mechanism.
builder.AddListeningPort(server_address,
             grpc::InsecureServerCredentials());
// Register `service` as the instance clients communicate with.
builder.RegisterService(&service);
// Assemble and start the server.
std::unique_ptr<Server> server(builder.BuildAndStart());
std::cout << "Server listening on " << server_address << std::endl;
// Blocks; some other thread must shut the server down for this to return.
server->Wait();
}
https://github.com/grpc/grpc/tree/master/examples/cpp/helloworld
参考:gRPC C++ Hello World Tutorial
サーバーのmain関数
// Server entry point: run the gRPC Greeter server until shutdown.
int main(int argc, char** argv) {
RunServer();
return 0;
}
https://github.com/grpc/grpc/tree/master/examples/cpp/helloworld
参考:gRPC C++ Hello World Tutorial
クライアントのコール部分
std::string SayHello(const std::string& user) {
// Data we are sending to the server.
HelloRequest request;
request.set_name(user);
ClientContext context;
Status status = stub_->SayHello(&context, request, &reply);
if (status.ok()) {
return reply.message();
} else {
std::cout << status.error_code() << ": " << status.error_message()
<< std::endl;
return "RPC failed";
}
}
https://github.com/grpc/grpc/tree/master/examples/cpp/helloworld
参考:gRPC C++ Hello World Tutorial
クライアントのmain関数
// Client entry point: connect to localhost:50051 over an insecure
// (plaintext) channel, send one SayHello RPC, and print the reply.
int main(int argc, char** argv) {
GreeterClient greeter(grpc::CreateChannel(
"localhost:50051", grpc::InsecureChannelCredentials()));
std::string user("world");
std::string reply = greeter.SayHello(user);
std::cout << "Greeter received: " << reply << std::endl;
return 0;
}
https://github.com/grpc/grpc/tree/master/examples/cpp/helloworld
サービスプロトコル
xla_service_ = grpc::XlaService::NewStub(channel);
service XlaService {
/////////////////////////
// Global data requests
// Unregisters a global allocation.
//
// If the handle given is not currently allocated, a NOT_FOUND status is
// returned.
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/xla_service.proto
サーバー側
// Service implementation which wraps a XLA Service with a GRPC interface.
class GRPCService : public grpc::XlaService::Service {
public:
// Factory for creating a RPCService. The parameter platform is the platform
// that the service should target. If platform is null then the default
// platform is used.
static StatusOr<std::unique_ptr<GRPCService>> NewService(
se::Platform* platform = nullptr);
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_service.h
サービス
/* static */
// Factory: creates a GRPCService wrapping a new ::xla::Service for
// `platform` (the default platform when `platform` is null).
StatusOr<std::unique_ptr<GRPCService>> GRPCService::NewService(
se::Platform* platform) {
std::unique_ptr<GRPCService> grpc_service(new GRPCService());
// Create the underlying XLA service the gRPC layer delegates to.
TF_ASSIGN_OR_RETURN(grpc_service->service_,
::xla::Service::NewService(platform));
return std::move(grpc_service);
}
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_service.h
// Entry point of grpc_service_main: delegate to xla::RealMain, which
// parses flags and runs the gRPC XLA service server.
int main(int argc, char** argv) {
return xla::RealMain(argc, argv);
}
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_service_main.cc
サーバー
int RealMain(int argc, char** argv) {
int32 port = 1685;
std::vector<tensorflow::Flag> flag_list = {
tensorflow::Flag("port", &port, "port to listen on"),
};
string usage = tensorflow::Flags::Usage(argv[0], flag_list);
bool parsed_values_ok = tensorflow::Flags::Parse(&argc, argv, flag_list);
if (!parsed_values_ok) {
LOG(ERROR) << usage;
return 2;
}
tensorflow::port::InitMain(argv[0], &argc, &argv);
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_service_main.cc
std::unique_ptr<xla::GRPCService> service =
xla::GRPCService::NewService().ConsumeValueOrDie();
::grpc::ServerBuilder builder;
builder.AddListeningPort(server_address,
::grpc::InsecureServerCredentials());
builder.RegisterService(service.get());
std::unique_ptr<::grpc::Server> server(builder.BuildAndStart());
server->Wait();
return 0;
}
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_service_main.cc
クライアント側
TEST_F(GRPCClientTestBase, AxpyTenValues) {
XlaBuilder builder("axpy_10");
auto alpha = ConstantR0<float>(&builder, 3.1415926535);
auto x = ConstantR1<float>(
&builder, {-1.0, 1.0, 2.0, -2.0, -3.0, 3.0, 4.0, -4.0, -5.0, 5.0});
auto y = ConstantR1<float>(
&builder, {5.0, -5.0, -4.0, 4.0, 3.0, -3.0, -2.0, 2.0, 1.0, -1.0});
auto ax = Mul(alpha, x);
Add(ax, y);
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.cc
クライアント
axpy = x * alpha + y
std::vector<float> expected = {
1.85840735, -1.85840735, 2.28318531, -2.28318531, -6.42477796,
6.42477796, 10.56637061, -10.56637061, -14.70796327, 14.70796327};
std::unique_ptr<Literal> expected_literal =
Literal::CreateR1<float>(expected);
auto computation = builder.Build();
auto result_literal =
client_->ExecuteAndTransfer(computation, {}, nullptr);
EXPECT_TRUE(LiteralTestUtil::Near(
*expected_literal, *result_literal, ErrorSpec(0.0001)));
}
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.cc
RPCを実行
class GRPCClientTestBase : public ::testing::Test {
protected:
GRPCClientTestBase() {
…..
xla_service_ = grpc::XlaService::NewStub(channel);
stub_.reset(new GRPCStub(xla_service_.get()));
client_.reset(new Client(stub_.get()));
}
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.cc
ブログ (2007年~) : Vengineerの戯言
 http://blogs.yahoo.co.jp/verification_engineer
SlideShare :
 https://www.slideshare.net/ssuser479fa3
ありがとうございました
Twitter (2009年~) :
@Vengineer
ソースコード解析職人

TensorFlow XLA RPC

  • 1.
    TensorFlow XLA RPC TensorFlow r1.9 2018.06.23/06.30/07.14 @Vengineer TensorFlowXLAのロゴ https://github.com/tensorflow/tensorflow/tree/master/tensorflow/compiler/xla
  • 2.
    ブログ (2007年~) :Vengineerの戯言  http://blogs.yahoo.co.jp/verification_engineer SlideShare :  https://www.slideshare.net/ssuser479fa3 Twitter (2009年~) : @Vengineer ソースコード解析職人
  • 3.
  • 4.
    compiler/xla/client/local_client.cc StatusOr<std::unique_ptr<LocalExecutable>> LocalClient::Compile( const XlaComputation&computation, const tensorflow::gtl::ArraySlice<const Shape*> argument_layouts, const ExecutableBuildOptions& options) { ExecutableBuildOptions updated_options = options; if (options.device_ordinal() == -1) { updated_options.set_device_ordinal(default_device_ordinal()); VLOG(3) << "Set device ordinal to default value of: " << updated_options.device_ordinal(); } TF_ASSIGN_OR_RETURN(std::unique_ptr<Executable> executable, local_service_->CompileExecutable( computation, argument_layouts, updated_options)); return WrapUnique(new LocalExecutable(std::move(executable), local_service_->mutable_backend(), updated_options)); } local_service の CompileExecutable で サービス側で computation をコンパイルして、 Executable にして返す。
  • 5.
    compiler/xla/service/local_service.cc StatusOr<std::unique_ptr<Executable>> LocalService:: CompileExecutable( constXlaComputation& computation, const tensorflow::gtl::ArraySlice<const Shape*> argument_layouts, const ExecutableBuildOptions& build_options) { const HloModuleProto& proto = computation.proto(); .... TF_ASSIGN_OR_RETURN( se::StreamExecutor * executor, execute_backend_->stream_executor(build_options.device_ordinal())); return BuildExecutable(proto, std::move(module_config), execute_backend_.get(), executor, build_options.device_allocator()); } CompileExecutable では、 Serviceの BuildExecutableにて、 Executableを生成する。
  • 6.
    compiler/xla/service/service.cc StatusOr<std::vector<std::unique_ptr<Executable>>> Service::BuildExecutables( const std::vector<constHloModuleProto*>& module_protos, std::vector<std::unique_ptr<HloModuleConfig>> module_configs, Backend* backend, std::vector<std::vector<se::StreamExecutor*>> executors, DeviceMemoryAllocator* device_allocator) { .... TF_ASSIGN_OR_RETURN( std::vector<std::unique_ptr<Executable>> executables, backend->compiler()->Compile(std::move(modules), std::move(executors), device_allocator)); .... return std::move(executables); } BuildExecutable では、 backendのcompilerにて、Compileし、 Executableを生成する。
  • 7.
    XLAグラフに変換 最適化、その1 ターゲットハードウェアの 実行オブジェクト ターゲットハードウェアに 依存しない最適化 HLO (High Level Optimizer) XLAグラフ 最適化、その2 コード生成 ターゲットハードウェアに 依存する最適化 LLO (Low Level Optimizer) TensorFlow Graph 実行オブジェクト XLAグラフ LLVM Compiler::Compile (r1.5~) RunHloPass RunBackend
  • 8.
  • 9.
  • 10.
  • 11.
  • 12.
  • 13.
    class GRPCClientTestBase :public ::testing::Test { protected: GRPCClientTestBase() { string test_srcdir = tensorflow::testing::TensorFlowSrcRoot(); string service_main_path = tensorflow::io::JoinPath( test_srcdir, "compiler/xla/rpc/grpc_service_main_cpu"); int port = tensorflow::internal::PickUnusedPortOrDie(); subprocess_.SetProgram( service_main_path, {service_main_path, tensorflow::strings::Printf("--port=%d", port)}); https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.c
  • 14.
    subprocess_.SetChannelAction( tensorflow::CHAN_STDOUT, tensorflow::ACTION_DUPPARENT); subprocess_.SetChannelAction( tensorflow::CHAN_STDERR, tensorflow::ACTION_DUPPARENT); CHECK(subprocess_.Start()); LOG(INFO) << "Launchedsubprocess"; 子プロセスとして、  を起動! grpc_service_main https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.c CPU XLA版が動作する
  • 15.
    auto channel = ::grpc::CreateChannel(tensorflow::strings::Printf("localhost:%d", port), ::grpc::InsecureChannelCredentials()); channel->WaitForConnected(gpr_time_add( gpr_now(GPR_CLOCK_REALTIME), gpr_time_from_seconds(10,GPR_TIMESPAN))); LOG(INFO) << "Channel to server is connected on port " << port; xla_service_ = grpc::XlaService::NewStub(channel); stub_.reset(new GRPCStub(xla_service_.get())); client_.reset(new Client(stub_.get())); } https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.c クライアント
  • 16.
    参考:gRPC C++ HelloWorld Tutorial サービス部分 class GreeterServiceImpl final : public Greeter::Service { Status SayHello(ServerContext* context, const HelloRequest* request, HelloReply* reply) override { std::string prefix("Hello "); reply->set_message(prefix + request->name()); return Status::OK; } }; https://github.com/grpc/grpc/tree/master/examples/cpp/helloworld
  • 17.
    参考:gRPC C++ HelloWorld Tutorial サーバー部分 void RunServer() { std::string server_address("0.0.0.0:50051"); GreeterServiceImpl service; ServerBuilder builder; builder.AddListeningPort(server_address,              grpc::InsecureServerCredentials()); builder.RegisterService(&service); std::unique_ptr<Server> server(builder.BuildAndStart()); std::cout << "Server listening on " << server_address << std::endl; server->Wait(); } https://github.com/grpc/grpc/tree/master/examples/cpp/helloworld
  • 18.
    参考:gRPC C++ HelloWorld Tutorial サーバーのmain関数 int main(int argc, char** argv) { RunServer(); return 0; } https://github.com/grpc/grpc/tree/master/examples/cpp/helloworld
  • 19.
    参考:gRPC C++ HelloWorld Tutorial クライアントのコール部分 std::string SayHello(const std::string& user) { // Data we are sending to the server. HelloRequest request; request.set_name(user); ClientContext context; Status status = stub_->SayHello(&context, request, &reply); if (status.ok()) { return reply.message(); } else { std::cout << status.error_code() << ": " << status.error_message() << std::endl; return "RPC failed"; } } https://github.com/grpc/grpc/tree/master/examples/cpp/helloworld
  • 20.
    参考:gRPC C++ HelloWorld Tutorial クライアントのmain関数 int main(int argc, char** argv) { GreeterClient greeter(grpc::CreateChannel( "localhost:50051", grpc::InsecureChannelCredentials())); std::string user("world"); std::string reply = greeter.SayHello(user); std::cout << "Greeter received: " << reply << std::endl; return 0; } https://github.com/grpc/grpc/tree/master/examples/cpp/helloworld
  • 21.
  • 22.
    xla_service_ = grpc::XlaService::NewStub(channel); serviceXlaService { ///////////////////////// // Global data requests // Unregisters a global allocation. // // If the handle given is not currently allocated, a NOT_FOUND status is // returned. https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/xla_service.proto
  • 23.
  • 24.
    // Service implementationwhich wraps a XLA Service with a GRPC interface. class GRPCService : public grpc::XlaService::Service { public: // Factory for creating a RPCService. The parameter platform is the platform // that the service should target. If platform is null then the default // platform is used. static StatusOr<std::unique_ptr<GRPCService>> NewService( se::Platform* platform = nullptr); https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_service.h サービス
  • 25.
    /* static */ StatusOr<std::unique_ptr<GRPCService>>GRPCService::NewService( se::Platform* platform) { std::unique_ptr<GRPCService> grpc_service(new GRPCService()); TF_ASSIGN_OR_RETURN(grpc_service->service_, ::xla::Service::NewService(platform)); return std::move(grpc_service); } https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_service.h
  • 26.
    int main(int argc,char** argv) { return xla::RealMain(argc, argv); } https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_service_main.cc サーバー
  • 27.
    int RealMain(int argc,char** argv) { int32 port = 1685; std::vector<tensorflow::Flag> flag_list = { tensorflow::Flag("port", &port, "port to listen on"), }; string usage = tensorflow::Flags::Usage(argv[0], flag_list); bool parsed_values_ok = tensorflow::Flags::Parse(&argc, argv, flag_list); if (!parsed_values_ok) { LOG(ERROR) << usage; return 2; } tensorflow::port::InitMain(argv[0], &argc, &argv); https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_service_main.cc
  • 28.
    std::unique_ptr<xla::GRPCService> service = xla::GRPCService::NewService().ConsumeValueOrDie(); ::grpc::ServerBuilderbuilder; builder.AddListeningPort(server_address, ::grpc::InsecureServerCredentials()); builder.RegisterService(service.get()); std::unique_ptr<::grpc::Server> server(builder.BuildAndStart()); server->Wait(); return 0; } https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_service_main.cc
  • 29.
  • 30.
    TEST_F(GRPCClientTestBase, AxpyTenValues) { XlaBuilderbuilder("axpy_10"); auto alpha = ConstantR0<float>(&builder, 3.1415926535); auto x = ConstantR1<float>( &builder, {-1.0, 1.0, 2.0, -2.0, -3.0, 3.0, 4.0, -4.0, -5.0, 5.0}); auto y = ConstantR1<float>( &builder, {5.0, -5.0, -4.0, 4.0, 3.0, -3.0, -2.0, 2.0, 1.0, -1.0}); auto ax = Mul(alpha, x); Add(ax, y); https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.c クライアント axpy = x * alpha + y
  • 31.
    std::vector<float> expected ={ 1.85840735, -1.85840735, 2.28318531, -2.28318531, -6.42477796, 6.42477796, 10.56637061, -10.56637061, -14.70796327, 14.70796327}; std::unique_ptr<Literal> expected_literal = Literal::CreateR1<float>(expected); auto computation = builder.Build(); auto result_literal = client_->ExecuteAndTransfer(computation, {}, nullptr)); EXPECT_TRUE(LiteralTestUtil::Near( *expected_literal, *result_literal, ErrorSpec(0.0001))); } https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.c RPCを実行
  • 32.
    class GRPCClientTestBase :public ::testing::Test { protected: GRPCClientTestBase() { ….. xla_service_ = grpc::XlaService::NewStub(channel); stub_.reset(new GRPCStub(xla_service_.get())); client_.reset(new Client(stub_.get())); } https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/rpc/grpc_client_test.c
  • 33.
    ブログ (2007年~) :Vengineerの戯言  http://blogs.yahoo.co.jp/verification_engineer SlideShare :  https://www.slideshare.net/ssuser479fa3 ありがとうございました Twitter (2009年~) : @Vengineer ソースコード解析職人