run method

@override
Future<void> run()
override

The SimpleFrameApp subclass implements the application-specific code.
Implementation

@override
Future<void> run() async {
  currentState = ApplicationState.running;
  if (mounted) setState(() {});

  // keep looping, taking photos and displaying, until user clicks cancel
  while (currentState == ApplicationState.running) {
    try {
      // metadata describing this capture's settings; size and elapsed time
      // are filled in after the image arrives
      ImageMetadata meta = ImageMetadata(
          _qualityValues[_qualityIndex].toInt(),
          _autoExpGainTimes,
          _meteringValues[_meteringIndex],
          _exposure,
          _exposureSpeed,
          _shutterLimit,
          _analogGainLimit,
          _whiteBalanceSpeed);

      // send the lua command to request a photo from the Frame,
      // timing the round trip from request to complete image
      _stopwatch.reset();
      _stopwatch.start();
      await frame!.sendMessage(TxCameraSettings(
        msgCode: 0x0d,
        qualityIndex: _qualityIndex,
        autoExpGainTimes: _autoExpGainTimes,
        meteringIndex: _meteringIndex,
        exposure: _exposure,
        exposureSpeed: _exposureSpeed,
        shutterLimit: _shutterLimit,
        analogGainLimit: _analogGainLimit,
        whiteBalanceSpeed: _whiteBalanceSpeed,
      ));

      // synchronously await the image response (packets are reassembled by
      // RxPhoto into a single whole-image byte list)
      Uint8List imageData =
          await RxPhoto(qualityLevel: _qualityValues[_qualityIndex].toInt())
              .attach(frame!.dataResponse)
              .first;

      // received a whole-image Uint8List with jpeg header and footer included
      _stopwatch.stop();
      _log.fine(
          'Image file size in bytes: ${imageData.length}, elapsedMs: ${_stopwatch.elapsedMilliseconds}');

      try {
        // Decode image using package:image/image.dart (https://pub.flutter-io.cn/packages/image)
        image_lib.Image? im = image_lib.decodeJpg(imageData);

        if (im != null) {
          // Frame camera is rotated 90 degrees clockwise, so make it upright for image processing
          im = image_lib.copyRotate(im, angle: 270);

          // Perform vision processing pipeline
          // send image to classifier, produce some candidate classes (https://pub.flutter-io.cn/packages/tflite_flutter)
          Map<String, double> classification =
              await _imageClassificationHelper.inferenceImage(im);

          // classification map is unordered and can be long: sort descending
          // by score, keep the best 3, and format as "label: score" lines
          _top3 = (classification.entries.toList()
                ..sort((a, b) => b.value.compareTo(a.value)))
              .take(3)
              .fold<String>(
                  '',
                  (previousValue, element) =>
                      '$previousValue\n${element.key}: ${element.value.toStringAsFixed(2)}')
              .trim();

          _log.fine('Classification result: $_top3');

          // Frame display
          await frame!.sendMessage(TxPlainText(msgCode: 0x0a, text: _top3));

          // UI display
          Image imWidget = Image.memory(
            image_lib.encodeJpg(im),
            gaplessPlayback: true,
          );

          // add the size and elapsed time to the image metadata widget
          meta.size = imageData.length;
          meta.elapsedTimeMs = _stopwatch.elapsedMilliseconds;

          // the widget may have been disposed while awaiting device I/O or
          // inference; guard setState to avoid "setState() after dispose()"
          if (mounted) {
            setState(() {
              _image = imWidget;
              _imageMeta = meta;
            });
          }
        }
      } catch (e, st) {
        // keep the stack trace so decode/inference failures are diagnosable
        _log.severe('Error converting bytes to image: $e', e, st);
      }
    } catch (e, st) {
      _log.severe('Error executing application: $e', e, st);
    }
  }
}