add paddle CU110 (#1604)
Lanking authored Apr 26, 2022
1 parent 40c4186, commit 3953410
Showing 4 changed files with 29 additions and 7 deletions.
14 changes: 14 additions & 0 deletions engines/paddlepaddle/paddlepaddle-engine/README.md
@@ -114,3 +114,17 @@ LD_LIBRARY_PATH=$HOME/.djl.ai/paddle/2.2.2-<cuda-flavor>-linux-x86_64
     <scope>runtime</scope>
 </dependency>
 ```
+
+#### Windows GPU (Experimental)
+
+- ai.djl.paddlepaddle:paddlepaddle-native-cu110:2.2.2:win-x86_64
+
+```xml
+<dependency>
+    <groupId>ai.djl.paddlepaddle</groupId>
+    <artifactId>paddlepaddle-native-cu110</artifactId>
+    <classifier>win-x86_64</classifier>
+    <version>2.2.2</version>
+    <scope>runtime</scope>
+</dependency>
+```
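Not part of the commit: the snippet below is a minimal smoke-test sketch of how the new Windows GPU dependency could be verified, assuming the standard DJL `Engine` API is on the classpath; the class name `PaddleCu110Check` is made up for illustration.

```java
import ai.djl.engine.Engine;

public class PaddleCu110Check {

    public static void main(String[] args) {
        // Resolving the engine loads the native library bundled in the
        // paddlepaddle-native-cu110 win-x86_64 artifact declared above.
        Engine engine = Engine.getEngine("PaddlePaddle");
        System.out.println("PaddlePaddle version: " + engine.getVersion());
        System.out.println("Visible GPUs: " + engine.getGpuCount());
        System.out.println("Default device: " + engine.defaultDevice());
    }
}
```

If the cu110 artifact resolved correctly on a CUDA 11.0 machine, the GPU count should be non-zero and the default device a GPU.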
@@ -24,7 +24,6 @@
 import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.regex.Matcher;
@@ -107,12 +106,17 @@ public static void loadLinuxDependencies(String libName) {
 
     public static void loadWindowsDependencies(String libName) {
         Path libDir = Paths.get(libName).getParent();
-        List<String> names = Collections.singletonList("openblas.dll");
+        List<String> names = Arrays.asList("openblas.dll", "mkldnn.dll");
         names.forEach(
                 name -> {
-                    String lib = libDir.resolve(name).toAbsolutePath().toString();
-                    logger.debug("Now loading " + lib);
-                    System.load(libDir.resolve(name).toAbsolutePath().toString());
+                    Path path = libDir.resolve(name);
+                    if (Files.isRegularFile(path)) {
+                        String lib = path.toAbsolutePath().toString();
+                        logger.debug("Now loading " + lib);
+                        System.load(lib);
+                    } else {
+                        logger.debug(name + " is not found, skip loading...");
+                    }
                 });
     }
 
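For clarity, here is the post-change Windows loader consolidated into a self-contained sketch. The wrapper class name is hypothetical, and the comment about which flavor ships which DLL is an inference from the openblas vs. MKL/CUDA download URLs rather than something stated in the commit.

```java
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class WindowsDependencyLoader {

    private static final Logger logger =
            LoggerFactory.getLogger(WindowsDependencyLoader.class);

    private WindowsDependencyLoader() {}

    /** Preloads the helper DLLs that sit next to the main Paddle library on Windows. */
    static void loadWindowsDependencies(String libName) {
        Path libDir = Paths.get(libName).getParent();
        // openblas.dll ships with the CPU (openblas) flavor and mkldnn.dll with the
        // MKL/CUDA flavor, so each DLL is loaded only if it is actually present.
        List<String> names = Arrays.asList("openblas.dll", "mkldnn.dll");
        names.forEach(
                name -> {
                    Path path = libDir.resolve(name);
                    if (Files.isRegularFile(path)) {
                        String lib = path.toAbsolutePath().toString();
                        logger.debug("Now loading " + lib);
                        System.load(lib);
                    } else {
                        logger.debug(name + " is not found, skip loading...");
                    }
                });
    }
}
```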
2 changes: 2 additions & 0 deletions engines/paddlepaddle/paddlepaddle-native/build.cmd
@@ -8,6 +8,8 @@ set FILEPATH="paddle"
 
 if "%1" == "cpu" (
     set DOWNLOAD_URL="https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Windows/CPU/x86-64_vs2017_avx_openblas/paddle_inference.zip"
+) else if "%1" == "cu110" (
+    set DOWNLOAD_URL="https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Windows/GPU/x86-64_vs2017_avx_mkl_cuda11.0_cudnn8/paddle_inference.zip"
 )
 
 if exist %FILEPATH% (
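Usage note (inferred from the branch above, not stated in the commit): invoking the script as `build.cmd cu110` instead of `build.cmd cpu` should point the download at the CUDA 11.0/cuDNN 8 Windows inference package rather than the CPU one.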
6 changes: 4 additions & 2 deletions engines/paddlepaddle/paddlepaddle-native/build.gradle
@@ -57,7 +57,8 @@ task prepareNativeLibs() {
         "cu102/linux": "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Linux/GPU/x86-64_gcc5.4_avx_mkl_cuda10.2_cudnn8.1.1_trt7.2.3.4/paddle_inference.tgz",
         "cu112/linux": "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Linux/GPU/x86-64_gcc5.4_avx_mkl_cuda11.2_cudnn8.2.1_trt8.0.3.4/paddle_inference.tgz",
         "cpu/osx"    : "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/MacOS/CPU/x86-64_clang_avx_openb/paddle_inference_install_dir.tgz",
-        "cpu/win"    : "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Windows/CPU/x86-64_vs2017_avx_openblas/paddle_inference.zip"
+        "cpu/win"    : "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Windows/CPU/x86-64_vs2017_avx_openblas/paddle_inference.zip",
+        "cu110/win"  : "https://paddle-inference-lib.bj.bcebos.com/2.2.2/cxx_c/Windows/GPU/x86-64_vs2017_avx_mkl_cuda11.0_cudnn8/paddle_inference.zip"
     ]
 
     def downloadDir = file("${buildDir}/download")
@@ -122,7 +123,8 @@ task uploadS3 {
         "${buildDir}/native/cu102/linux/native/lib/",
         "${buildDir}/native/cu112/linux/native/lib/",
         "${buildDir}/native/cpu/osx/native/lib/",
-        "${buildDir}/native/cpu/win/native/lib/"
+        "${buildDir}/native/cpu/win/native/lib/",
+        "${buildDir}/native/cu110/win/native/lib/"
     ]
     uploadDirs.each { item ->
         fileTree(item).files.name.each {
