loadGraphModel JSON model from local device ERROR React Native
I am trying to use loadGraphModel with a JSON model stored on the local device in React Native, and I get an error. Here is my function:
async function predictModellocal1() {
  console.log("[+] Application started");
  await tf.ready();
  // Make tfjs treat this as a mobile (non-browser) environment
  tf.device_util.isMobile = () => true;
  tf.device_util.isBrowser = () => false;

  console.log("[+] Loading face detection model");
  // Remote model: this works
  const modelUrl =
    'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';
  // Local model: this is what fails
  const modelPath =
    'D:\\Code\\ReactNative\\appAI-mobilenet\\assets\\facenet\\weights_manifest.json';
  const modelPath1 = await require("./assets/facenet/weights_manifest.json");
  const model = await tf.loadGraphModel(modelPath1);
  console.log("[+] Loading model: done");

  // Read the selected image as base64 and decode it into a tensor
  const imgB64 = await FileSystem.readAsStringAsync(selectedImage.localUri, {
    encoding: FileSystem.EncodingType.Base64,
  });
  const imgBuffer = tf.util.encodeString(imgB64, 'base64').buffer;
  const raw = new Uint8Array(imgBuffer);
  const imageTensor = decodeJpeg(raw);

  // Resize to the 224x224 input expected by MobileNet and add a batch dimension
  const img = tf.image.resizeBilinear(imageTensor, [224, 224]);
  console.log('Ran');
  const img4d = img.reshape([1, 224, 224, 3]);

  const prediction = await model.predict(img4d);
  const kq = getTopKClasses(prediction, 5);
  console.log(kq, 'kq');
  setTextshow('ok');
}
modelUrl works fine; I can load the model from the Google API:
const modelUrl ='https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';
But I want to loadGraphModel from the local machine so the app can work offline:
const modelPath='D:\\Code\\ReactNative\\appAI-mobilenet\\assets\\facenet\\weights_manifest.json';
const modelPath1 = await require("./assets/facenet/weights_manifest.json");
I am using Expo CLI for my app. Thanks everyone. The errors I get:
Error: EISDIR: illegal operation on a directory, read
at Object.readSync (fs.js:523:3)
at tryReadSync (fs.js:348:20)
at Object.readFileSync (fs.js:385:19)
at UnableToResolveError.buildCodeFrameMessage (D:\Code\ReactNative\appAI-mobilenet\node_modules\metro\src\node-haste\DependencyGraph\ModuleResolution.js:304:17)
at new UnableToResolveError (D:\Code\ReactNative\appAI-mobilenet\node_modules\metro\src\node-haste\DependencyGraph\ModuleResolution.js:290:35)
at ModuleResolver.resolveDependency (D:\Code\ReactNative\appAI-mobilenet\node_modules\metro\src\node-haste\DependencyGraph\ModuleResolution.js:168:15)
at DependencyGraph.resolveDependency (D:\Code\ReactNative\appAI-mobilenet\node_modules\metro\src\node-haste\DependencyGraph.js:353:43)
at D:\Code\ReactNative\appAI-mobilenet\node_modules\metro\src\lib\transformHelpers.js:271:42
at D:\Code\ReactNative\appAI-mobilenet\node_modules\metro\src\Server.js:1097:37
at Generator.next (<anonymous>)
[Unhandled promise rejection: TypeError: url.match is not a function. (In 'url.match(HTTPRequest.URL_SCHEME_REGEX)', 'url.match' is undefined)]
at http://192.168.31.235:19000/node_modules%5Cexpo%5CAppEntry.bundle?platform=ios&dev=true&hot=false&minify=false:184464:20 in isHTTPScheme
at [native code]:null in every
at httpRouter@http://192.168.31.235:19000/node_modules%5Cexpo%5CAppEntry.bundle?platform=ios&dev=true&hot=false&minify=false:184474:26 in <unknown>
1 Answer
Remove the await keyword when assigning the path to modelPath1:
const modelPath1 = require("./assets/facenet/weights_manifest.json");
const model = await tf.loadGraphModel(modelPath1);
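If that alone is not enough, another common way to ship a model with an Expo/React Native app and load it offline is the bundleResourceIO handler from @tensorflow/tfjs-react-native. Below is a minimal sketch, assuming the model was converted with tensorflowjs_converter into a model.json plus a single group1-shard1of1.bin weight file; your actual file names (e.g. weights_manifest.json) may differ.

import * as tf from '@tensorflow/tfjs';
import { bundleResourceIO } from '@tensorflow/tfjs-react-native';

// Metro must be told to bundle .bin files as assets, e.g. in metro.config.js:
//   const { getDefaultConfig } = require('expo/metro-config');
//   const config = getDefaultConfig(__dirname);
//   config.resolver.assetExts.push('bin');
//   module.exports = config;

// require() the bundled model files (no await: require is synchronous)
const modelJson = require('./assets/facenet/model.json');
const modelWeights = require('./assets/facenet/group1-shard1of1.bin');

async function loadLocalModel() {
  await tf.ready();
  // bundleResourceIO builds an IOHandler that reads the model from the app bundle
  const model = await tf.loadGraphModel(bundleResourceIO(modelJson, modelWeights));
  return model;
}

With this approach the absolute path string (modelPath) is not needed at all: Metro resolves the assets at build time, so the app can run fully offline.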