Skip to content

Commit

Permalink
refactor dir
Browse files Browse the repository at this point in the history
  • Loading branch information
calcitem committed Oct 27, 2024
1 parent 3cff6f1 commit 390dd2c
Show file tree
Hide file tree
Showing 6 changed files with 53 additions and 53 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
import 'dart:math' as math;

/// Main configuration class, containing all settings related to image processing.
class ImageProcessingConfig {
ImageProcessingConfig._(); // Private constructor to prevent instantiation
class ProcessingConfig {
ProcessingConfig._(); // Private constructor to prevent instantiation

// Static instances of various configurations
static GammaConfig gammaConfig = GammaConfig(gamma: 1.2);
Expand Down Expand Up @@ -157,7 +157,7 @@ class UIConfig {
UIConfig._(); // Private constructor to prevent instantiation

// App title
final String appBarTitle = "Nine Men's Morris Recognition";
final String appBarTitle = "Mill Board Recognition";

// Button text
final String selectAndProcessImageButton = "Select and Process Image";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@

import 'dart:math' as math;
import 'package:opencv_dart/opencv_dart.dart' as cv;
import 'image_processing_config.dart';
import 'image_to_fen_page.dart';
import '../models/processing_config.dart';
import '../widgets/recognition_page.dart';

class Line {
Line(this.startPoint, this.endPoint);
Expand All @@ -29,21 +29,21 @@ List<Line> filterLines(cv.Mat lines) {
.abs();

if ((angle - 0).abs() <
ImageProcessingConfig.houghTransformConfig.angleTolerance ||
ProcessingConfig.houghTransformConfig.angleTolerance ||
(angle - math.pi).abs() <
ImageProcessingConfig.houghTransformConfig.angleTolerance) {
ProcessingConfig.houghTransformConfig.angleTolerance) {
horizontalLines.add(line);
} else if ((angle - math.pi / 2).abs() <
ImageProcessingConfig.houghTransformConfig.angleTolerance) {
ProcessingConfig.houghTransformConfig.angleTolerance) {
verticalLines.add(line);
}
}

return <Line>[
...removeDuplicateLines(horizontalLines,
ImageProcessingConfig.houghTransformConfig.distanceThreshold),
...removeDuplicateLines(verticalLines,
ImageProcessingConfig.houghTransformConfig.distanceThreshold),
ProcessingConfig.houghTransformConfig.distanceThreshold),
...removeDuplicateLines(
verticalLines, ProcessingConfig.houghTransformConfig.distanceThreshold),
];
}

Expand Down Expand Up @@ -176,11 +176,11 @@ cv.Mat warpPerspective(cv.Mat mat, cv.VecPoint contour) {
edges,
1,
math.pi / 180,
ImageProcessingConfig
ProcessingConfig
.houghTransformConfig.threshold, // Adjustable Hough threshold
minLineLength: ImageProcessingConfig
minLineLength: ProcessingConfig
.houghTransformConfig.minLineLength, // Adjustable min line length
maxLineGap: ImageProcessingConfig
maxLineGap: ProcessingConfig
.houghTransformConfig.maxLineGap, // Adjustable max line gap
);

Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import 'package:opencv_dart/core.dart';
import 'package:opencv_dart/opencv_dart.dart' as cv;

import '../shared/services/environment_config.dart';
import '../shared/services/logger.dart';
import '../../shared/services/environment_config.dart';
import '../../shared/services/logger.dart';

List<String> detectPieces(cv.Mat warped) {
final List<String> positions = List<String>.filled(24, 'X');
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,27 +8,27 @@ import 'package:image/image.dart' as img;
import 'package:image_picker/image_picker.dart';
import 'package:opencv_dart/opencv_dart.dart' as cv;

import '../../shared/services/logger.dart';
import '../shared/services/environment_config.dart';
import 'board_detection.dart';
import 'fen_generator.dart';
import 'image_processing_config.dart';
import 'piece_detection.dart';
import '../../../shared/services/logger.dart';
import '../../shared/services/environment_config.dart';
import '../models/processing_config.dart';
import '../services/board_detector.dart';
import '../services/fen_builder.dart';
import '../services/piece_detector.dart';

/// Pairs a [cv.Point] with an associated angle value.
///
/// Immutable value holder used during board-corner processing.
class PointWithAngle {
  PointWithAngle(this.point, this.angle);

  // The pixel coordinates of the point.
  final cv.Point point;

  // The angle associated with [point]; presumably radians, as the
  // surrounding code uses `dart:math` (math.pi) — TODO confirm at call site.
  final double angle;
}

class ImageToFenApp extends StatefulWidget {
const ImageToFenApp({super.key});
class RecognitionPage extends StatefulWidget {
const RecognitionPage({super.key});

@override
ImageToFenAppState createState() => ImageToFenAppState();
RecognitionPageState createState() => RecognitionPageState();
}

class ImageToFenAppState extends State<ImageToFenApp> {
class RecognitionPageState extends State<RecognitionPage> {
Uint8List? _debugImage;
Uint8List? _processedImage;
Uint8List? grayImage;
Expand Down Expand Up @@ -102,7 +102,7 @@ class ImageToFenAppState extends State<ImageToFenApp> {
logger.i('Number of grayscale image pixels: ${grayPixels.length}');

// Use adjustable gamma value
final double inverseGamma = 1.0 / ImageProcessingConfig.gammaConfig.gamma;
final double inverseGamma = 1.0 / ProcessingConfig.gammaConfig.gamma;
logger.i('Using inverse gamma value: $inverseGamma');

// Generate LUT for faster gamma transformation
Expand Down Expand Up @@ -143,7 +143,7 @@ class ImageToFenAppState extends State<ImageToFenApp> {
// Apply Gaussian blur
logger.i('Applying Gaussian blur');
final cv.Mat blurred = cv.gaussianBlur(
enhanced, ImageProcessingConfig.parameters.gaussianKernelSize, 0);
enhanced, ProcessingConfig.parameters.gaussianKernelSize, 0);
logger.i(
'Gaussian blur applied, Blurred Mat dimensions: ${blurred.rows}x${blurred.cols}');

Expand All @@ -159,16 +159,16 @@ class ImageToFenAppState extends State<ImageToFenApp> {
255,
cv.ADAPTIVE_THRESH_GAUSSIAN_C,
cv.THRESH_BINARY_INV,
ImageProcessingConfig.adaptiveThresholdConfig.blockSize,
ImageProcessingConfig.adaptiveThresholdConfig.c,
ProcessingConfig.adaptiveThresholdConfig.blockSize,
ProcessingConfig.adaptiveThresholdConfig.c,
);
logger.i(
'Adaptive thresholding completed, Thresh Mat dimensions: ${thresh.rows}x${thresh.cols}');

// Apply morphological closing to connect broken edges
logger.i('Applying morphological closing to connect broken edges');
final cv.Mat kernel = cv.getStructuringElement(
cv.MORPH_RECT, ImageProcessingConfig.parameters.morphologyKernelSize);
cv.MORPH_RECT, ProcessingConfig.parameters.morphologyKernelSize);
final cv.Mat closed = cv.morphologyEx(thresh, cv.MORPH_CLOSE, kernel);
logger.i(
'Morphological closing completed, Closed Mat dimensions: ${closed.rows}x${closed.cols}');
Expand Down Expand Up @@ -231,9 +231,9 @@ class ImageToFenAppState extends State<ImageToFenApp> {
logger.i('Contour[$idx] area: $area');

// Filter based on area
if (area < ImageProcessingConfig.contourConfig.areaThreshold) {
if (area < ProcessingConfig.contourConfig.areaThreshold) {
logger.i(
'Contour[$idx] area below threshold ${ImageProcessingConfig.contourConfig.areaThreshold}, skipping');
'Contour[$idx] area below threshold ${ProcessingConfig.contourConfig.areaThreshold}, skipping');
continue;
}

Expand All @@ -242,7 +242,7 @@ class ImageToFenAppState extends State<ImageToFenApp> {

final cv.VecPoint approx = cv.approxPolyDP(
contour,
ImageProcessingConfig.contourConfig.epsilonMultiplier * peri,
ProcessingConfig.contourConfig.epsilonMultiplier * peri,
true,
); // Use adjustable epsilon
logger.i(
Expand Down Expand Up @@ -273,10 +273,10 @@ class ImageToFenAppState extends State<ImageToFenApp> {
final double aspectRatio = rect.width / rect.height;
logger.i('Contour[$idx] aspect ratio: $aspectRatio');

if (aspectRatio > ImageProcessingConfig.contourConfig.aspectRatioMin &&
aspectRatio < ImageProcessingConfig.contourConfig.aspectRatioMax) {
if (aspectRatio > ProcessingConfig.contourConfig.aspectRatioMin &&
aspectRatio < ProcessingConfig.contourConfig.aspectRatioMax) {
logger.i(
'Contour[$idx] aspect ratio within acceptable range (${ImageProcessingConfig.contourConfig.aspectRatioMin} - ${ImageProcessingConfig.contourConfig.aspectRatioMax})');
'Contour[$idx] aspect ratio within acceptable range (${ProcessingConfig.contourConfig.aspectRatioMin} - ${ProcessingConfig.contourConfig.aspectRatioMax})');
// Use adjustable aspect ratio
if (area > maxArea) {
logger.i(
Expand All @@ -289,7 +289,7 @@ class ImageToFenAppState extends State<ImageToFenApp> {
}
} else {
logger.i(
'Contour[$idx] aspect ratio outside acceptable range (${ImageProcessingConfig.contourConfig.aspectRatioMin} - ${ImageProcessingConfig.contourConfig.aspectRatioMax})');
'Contour[$idx] aspect ratio outside acceptable range (${ProcessingConfig.contourConfig.aspectRatioMin} - ${ProcessingConfig.contourConfig.aspectRatioMax})');
}
} else {
logger.i(
Expand Down Expand Up @@ -318,11 +318,11 @@ class ImageToFenAppState extends State<ImageToFenApp> {
boardContours,
-1,
cv.Scalar(
ImageProcessingConfig.parameters.drawContoursColor.$1.toDouble(),
ImageProcessingConfig.parameters.drawContoursColor.$2.toDouble(),
ImageProcessingConfig.parameters.drawContoursColor.$3.toDouble(),
ProcessingConfig.parameters.drawContoursColor.$1.toDouble(),
ProcessingConfig.parameters.drawContoursColor.$2.toDouble(),
ProcessingConfig.parameters.drawContoursColor.$3.toDouble(),
), // Using configured color
thickness: ImageProcessingConfig.parameters.drawContoursThickness,
thickness: ProcessingConfig.parameters.drawContoursThickness,
);
logger.i('Contour drawing completed');

Expand Down Expand Up @@ -432,10 +432,10 @@ class ImageToFenAppState extends State<ImageToFenApp> {
warped.dispose();
warpedWithLines.dispose();
} else {
logger.i("Nine Men's Morris Board not detected");
logger.i("Mill Board not detected");
setState(() {
_debugImage = cv.imencode('.png', matWithContours).$2;
_fenString = "Nine Men's Morris board not detected";
_fenString = "Mill board not detected";
});
}

Expand All @@ -457,7 +457,7 @@ class ImageToFenAppState extends State<ImageToFenApp> {

@override
Widget build(BuildContext context) {
final UIConfig uiConfig = ImageProcessingConfig.uiConfig;
final UIConfig uiConfig = ProcessingConfig.uiConfig;

return MaterialApp(
home: Scaffold(
Expand Down Expand Up @@ -528,7 +528,7 @@ class ImageToFenAppState extends State<ImageToFenApp> {
padding: const EdgeInsets.all(16.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: ImageProcessingConfig.uiConfig.sliders
children: ProcessingConfig.uiConfig.sliders
.map((SliderConfig sliderConfig) {
return Padding(
padding: const EdgeInsets.only(bottom: 16.0),
Expand All @@ -537,23 +537,23 @@ class ImageToFenAppState extends State<ImageToFenApp> {
children: <Widget>[
// Slider title
Text(
'${sliderConfig.labelPrefix}${sliderConfig.labelFormatter(ImageProcessingConfig.getSliderValue(sliderConfig.configKey))}',
'${sliderConfig.labelPrefix}${sliderConfig.labelFormatter(ProcessingConfig.getSliderValue(sliderConfig.configKey))}',
style:
const TextStyle(fontWeight: FontWeight.bold),
),
// Slider itself
Slider(
value: ImageProcessingConfig.getSliderValue(
value: ProcessingConfig.getSliderValue(
sliderConfig.configKey),
min: sliderConfig.min,
max: sliderConfig.max,
divisions: sliderConfig.divisions,
label: sliderConfig.labelFormatter(
ImageProcessingConfig.getSliderValue(
ProcessingConfig.getSliderValue(
sliderConfig.configKey)),
onChanged: (double value) {
setState(() {
ImageProcessingConfig.updateConfig(
ProcessingConfig.updateConfig(
sliderConfig.configKey, value);
});
},
Expand Down
6 changes: 3 additions & 3 deletions src/ui/flutter_app/lib/game_page/widgets/play_area.dart
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,9 @@ import 'package:fluentui_system_icons/fluentui_system_icons.dart';
import 'package:flutter/material.dart';
import 'package:native_screenshot_widget/native_screenshot_widget.dart';

import '../../board_recognition/widgets/recognition_page.dart';
import '../../general_settings/widgets/general_settings_page.dart';
import '../../generated/intl/l10n.dart';
import '../../image_to_fen/image_to_fen_page.dart';
import '../../shared/config/constants.dart';
import '../../shared/database/database.dart';
import '../../shared/services/screenshot_service.dart';
Expand Down Expand Up @@ -129,8 +129,8 @@ class PlayAreaState extends State<PlayArea> {
onPressed: () {
Navigator.push(
context,
MaterialPageRoute<ImageToFenApp>(
builder: (BuildContext context) => const ImageToFenApp(),
MaterialPageRoute<RecognitionPage>(
builder: (BuildContext context) => const RecognitionPage(),
),
);
},
Expand Down

0 comments on commit 390dd2c

Please sign in to comment.