    Searched refs:MPI (Results 1 - 23 of 23)

  /external/tensorflow/tensorflow/contrib/mpi/
README.md 1 ## How to compile and use MPI-enabled TensorFlow
3 1. Follow the regular TF compilation instructions. During the configure step, if you want MPI support, answer yes to this question:
5 ```Do you wish to build TensorFlow with MPI support [y/N]```
7 2. To turn on the MPI connection, add the protocol "grpc+mpi" in the server definition:
9 ```server = tf.train.Server(cluster, job_name="local", task_index=0, protocol='grpc+mpi') # default protocol is 'grpc'```
13 By using this protocol, TensorFlow can take advantage of the high-performance networking primitives offered by the MPI API, including high-performance, low-latency interconnects such as InfiniBand. These changes are largely transparent to the user, who only has to change the protocol and launch the script with the 'mpirun' launcher. For example:
26 This environment variable allows you to disable the MPI path before launch (e.g. for performance or correctness testing).
31 This path is disabled by default because it requires that the MPI library can directly access the pointer to the data. For CPU-backed buffers this is no problem; for GPU-backed buffers, however, it requires an MPI library built with CUDA support (CUDA-aware). When using non-CUDA-aware MPI libraries a (…)
    [all...]
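
The launch example referenced after "For example:" is elided in this hit. As a minimal sketch (the cluster spec, host names, and ports are illustrative assumptions, not taken from the README), an MPI-enabled server might be set up like this:

```python
import tensorflow as tf

# Hypothetical two-task cluster; host names and ports are assumptions.
cluster = tf.train.ClusterSpec({"local": ["machine1:2222", "machine2:2222"]})

# protocol='grpc+mpi' routes tensor transfers over the MPI transport;
# the default is plain 'grpc'.
server = tf.train.Server(cluster, job_name="local", task_index=0,
                         protocol="grpc+mpi")
server.join()
```

The script would then be started once per task with the MPI launcher, e.g. `mpirun -np 2 python myscript.py` (exact flags depend on the MPI implementation).
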
  /external/tensorflow/tensorflow/contrib/mpi_collectives/
__init__.py 16 """## Communicating Between Processes with MPI
23 gradient descent). This module implements a variety of MPI ops which can take
24 advantage of hardware-specific MPI libraries for efficient communication.
26 In order to use this module, TensorFlow must be built with an MPI library,
28 TensorFlow, you will need to build TensorFlow yourself to select the MPI
35 for detecting the running MPI configuration.
40 import tensorflow.contrib.mpi_collectives as mpi
42 # Use `mpi.Session` instead of `tf.Session`
43 with mpi.Session() as session:
44 rank = session.run(mpi.rank())
    [all...]
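
The usage example in this hit is cut off. A hedged completion, assuming a `size()` op exists alongside `rank()` in `tensorflow.contrib.mpi_collectives` (only `rank()` appears in the snippet above):

```python
import tensorflow.contrib.mpi_collectives as mpi

# A minimal sketch; mpi.size() is an assumption based on the module's
# documented purpose, not shown in the search hit.
with mpi.Session() as session:
    rank = session.run(mpi.rank())  # this process's MPI rank
    size = session.run(mpi.size())  # total number of MPI processes
    print("Process %d of %d" % (rank, size))
```

Launched under mpirun (e.g. `mpirun -np 2 python script.py`), each process joins the same MPI communicator and reports its own rank.
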
  /external/eigen/cmake/
FindPTSCOTCH.cmake 20 # - MPI
90 # PTSCOTCH depends on MPI, try to find it
93 find_package(MPI REQUIRED)
95 find_package(MPI)
285 # MPI
FindPastix.cmake 20 # - MPI
27 # - MPI: to activate detection of the parallel MPI version (default)
28 # it looks for Threads, HWLOC, BLAS, MPI and ScaLAPACK libraries
29 # - SEQ: to activate detection of the sequential version (exclude MPI version)
31 # it looks for MPI version of StarPU (default behaviour)
32 # if SEQ and STARPU are given, it looks for a StarPU without MPI
96 # means we look for the sequential version of PaStiX (without MPI)
100 if (${component} STREQUAL "MPI")
101 # means we look for the MPI version of PaStiX (default behaviour)
    [all...]
  /external/llvm/lib/Transforms/Utils/
MemorySSA.cpp     [all...]
  /external/llvm/lib/Target/WebAssembly/
WebAssemblyRegStackify.cpp 142 const MachinePointerInfo &MPI = MMO->getPointerInfo();
143 if (MPI.V.is<const PseudoSourceValue *>()) {
144 auto PSV = MPI.V.get<const PseudoSourceValue *>();
    [all...]
  /prebuilts/clang/host/darwin-x86/clang-4393122/include/llvm/DebugInfo/CodeView/
TypeRecord.h 289 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
291 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/darwin-x86/clang-4479392/include/llvm/DebugInfo/CodeView/
TypeRecord.h 289 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
291 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/darwin-x86/clang-4579689/include/llvm/DebugInfo/CodeView/
TypeRecord.h 291 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
293 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/darwin-x86/clang-4630689/include/llvm/DebugInfo/CodeView/
TypeRecord.h 291 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
293 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/darwin-x86/clang-4639204/include/llvm/DebugInfo/CodeView/
TypeRecord.h 291 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
293 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/darwin-x86/clang-4691093/include/llvm/DebugInfo/CodeView/
TypeRecord.h 291 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
293 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/linux-x86/clang-4393122/include/llvm/DebugInfo/CodeView/
TypeRecord.h 289 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
291 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/linux-x86/clang-4479392/include/llvm/DebugInfo/CodeView/
TypeRecord.h 289 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
291 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/linux-x86/clang-4579689/include/llvm/DebugInfo/CodeView/
TypeRecord.h 291 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
293 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/linux-x86/clang-4630689/include/llvm/DebugInfo/CodeView/
TypeRecord.h 291 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
293 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/linux-x86/clang-4639204/include/llvm/DebugInfo/CodeView/
TypeRecord.h 291 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
293 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /prebuilts/clang/host/linux-x86/clang-4691093/include/llvm/DebugInfo/CodeView/
TypeRecord.h 291 PointerOptions PO, uint8_t Size, const MemberPointerInfo &MPI)
293 Attrs(calcAttrs(PK, PM, PO, Size)), MemberInfo(MPI) {}
  /external/llvm/lib/Target/PowerPC/
PPCISelLowering.cpp     [all...]
PPCISelLowering.h 751 MachinePointerInfo MPI;
    [all...]
  /external/llvm/lib/CodeGen/AsmPrinter/
CodeViewDebug.cpp     [all...]
  /external/boringssl/src/crypto/fipsmodule/bn/
bn_test.cc 1039 const char *mpi; member in struct:MPITest
    [all...]
  /external/llvm/lib/CodeGen/SelectionDAG/
DAGCombiner.cpp     [all...]
