This commit is contained in:
Daniel
2026-03-16 18:02:20 +08:00
parent dafeb0c0a2
commit 84c28ad1c2
18 changed files with 1524 additions and 0 deletions

View File

@@ -0,0 +1,596 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 77;
objects = {
/* Begin PBXContainerItemProxy section */
7D8922492F6800BE001184E1 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 7D89222F2F6800BB001184E1 /* Project object */;
proxyType = 1;
remoteGlobalIDString = 7D8922362F6800BB001184E1;
remoteInfo = Aural;
};
7D8922532F6800BE001184E1 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 7D89222F2F6800BB001184E1 /* Project object */;
proxyType = 1;
remoteGlobalIDString = 7D8922362F6800BB001184E1;
remoteInfo = Aural;
};
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
7D8922372F6800BB001184E1 /* Aural.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Aural.app; sourceTree = BUILT_PRODUCTS_DIR; };
7D8922482F6800BE001184E1 /* AuralTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = AuralTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
7D8922522F6800BE001184E1 /* AuralUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = AuralUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFileSystemSynchronizedRootGroup section */
7D8922392F6800BB001184E1 /* Aural */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = Aural;
sourceTree = "<group>";
};
7D89224B2F6800BE001184E1 /* AuralTests */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = AuralTests;
sourceTree = "<group>";
};
7D8922552F6800BF001184E1 /* AuralUITests */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = AuralUITests;
sourceTree = "<group>";
};
/* End PBXFileSystemSynchronizedRootGroup section */
/* Begin PBXFrameworksBuildPhase section */
7D8922342F6800BB001184E1 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
7D8922452F6800BE001184E1 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
7D89224F2F6800BE001184E1 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
7D89222E2F6800BB001184E1 = {
isa = PBXGroup;
children = (
7D8922392F6800BB001184E1 /* Aural */,
7D89224B2F6800BE001184E1 /* AuralTests */,
7D8922552F6800BF001184E1 /* AuralUITests */,
7D8922382F6800BB001184E1 /* Products */,
);
sourceTree = "<group>";
};
7D8922382F6800BB001184E1 /* Products */ = {
isa = PBXGroup;
children = (
7D8922372F6800BB001184E1 /* Aural.app */,
7D8922482F6800BE001184E1 /* AuralTests.xctest */,
7D8922522F6800BE001184E1 /* AuralUITests.xctest */,
);
name = Products;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
7D8922362F6800BB001184E1 /* Aural */ = {
isa = PBXNativeTarget;
buildConfigurationList = 7D89225C2F6800BF001184E1 /* Build configuration list for PBXNativeTarget "Aural" */;
buildPhases = (
7D8922332F6800BB001184E1 /* Sources */,
7D8922342F6800BB001184E1 /* Frameworks */,
7D8922352F6800BB001184E1 /* Resources */,
);
buildRules = (
);
dependencies = (
);
fileSystemSynchronizedGroups = (
7D8922392F6800BB001184E1 /* Aural */,
);
name = Aural;
packageProductDependencies = (
);
productName = Aural;
productReference = 7D8922372F6800BB001184E1 /* Aural.app */;
productType = "com.apple.product-type.application";
};
7D8922472F6800BE001184E1 /* AuralTests */ = {
isa = PBXNativeTarget;
buildConfigurationList = 7D89225F2F6800BF001184E1 /* Build configuration list for PBXNativeTarget "AuralTests" */;
buildPhases = (
7D8922442F6800BE001184E1 /* Sources */,
7D8922452F6800BE001184E1 /* Frameworks */,
7D8922462F6800BE001184E1 /* Resources */,
);
buildRules = (
);
dependencies = (
7D89224A2F6800BE001184E1 /* PBXTargetDependency */,
);
fileSystemSynchronizedGroups = (
7D89224B2F6800BE001184E1 /* AuralTests */,
);
name = AuralTests;
packageProductDependencies = (
);
productName = AuralTests;
productReference = 7D8922482F6800BE001184E1 /* AuralTests.xctest */;
productType = "com.apple.product-type.bundle.unit-test";
};
7D8922512F6800BE001184E1 /* AuralUITests */ = {
isa = PBXNativeTarget;
buildConfigurationList = 7D8922622F6800BF001184E1 /* Build configuration list for PBXNativeTarget "AuralUITests" */;
buildPhases = (
7D89224E2F6800BE001184E1 /* Sources */,
7D89224F2F6800BE001184E1 /* Frameworks */,
7D8922502F6800BE001184E1 /* Resources */,
);
buildRules = (
);
dependencies = (
7D8922542F6800BE001184E1 /* PBXTargetDependency */,
);
fileSystemSynchronizedGroups = (
7D8922552F6800BF001184E1 /* AuralUITests */,
);
name = AuralUITests;
packageProductDependencies = (
);
productName = AuralUITests;
productReference = 7D8922522F6800BE001184E1 /* AuralUITests.xctest */;
productType = "com.apple.product-type.bundle.ui-testing";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
7D89222F2F6800BB001184E1 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 1620;
LastUpgradeCheck = 1620;
TargetAttributes = {
7D8922362F6800BB001184E1 = {
CreatedOnToolsVersion = 16.2;
};
7D8922472F6800BE001184E1 = {
CreatedOnToolsVersion = 16.2;
TestTargetID = 7D8922362F6800BB001184E1;
};
7D8922512F6800BE001184E1 = {
CreatedOnToolsVersion = 16.2;
TestTargetID = 7D8922362F6800BB001184E1;
};
};
};
buildConfigurationList = 7D8922322F6800BB001184E1 /* Build configuration list for PBXProject "Aural" */;
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 7D89222E2F6800BB001184E1;
minimizedProjectReferenceProxies = 1;
preferredProjectObjectVersion = 77;
productRefGroup = 7D8922382F6800BB001184E1 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
7D8922362F6800BB001184E1 /* Aural */,
7D8922472F6800BE001184E1 /* AuralTests */,
7D8922512F6800BE001184E1 /* AuralUITests */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
7D8922352F6800BB001184E1 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
7D8922462F6800BE001184E1 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
7D8922502F6800BE001184E1 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
7D8922332F6800BB001184E1 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
7D8922442F6800BE001184E1 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
7D89224E2F6800BE001184E1 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
7D89224A2F6800BE001184E1 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
target = 7D8922362F6800BB001184E1 /* Aural */;
targetProxy = 7D8922492F6800BE001184E1 /* PBXContainerItemProxy */;
};
7D8922542F6800BE001184E1 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
target = 7D8922362F6800BB001184E1 /* Aural */;
targetProxy = 7D8922532F6800BE001184E1 /* PBXContainerItemProxy */;
};
/* End PBXTargetDependency section */
/* Begin XCBuildConfiguration section */
7D89225A2F6800BF001184E1 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
7D89225B2F6800BF001184E1 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SWIFT_COMPILATION_MODE = wholemodule;
};
name = Release;
};
7D89225D2F6800BF001184E1 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_ENTITLEMENTS = Aural/Aural.entitlements;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "\"Aural/Preview Content\"";
DEVELOPMENT_TEAM = 3882NS6655;
ENABLE_HARDENED_RUNTIME = YES;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 18.2;
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
MACOSX_DEPLOYMENT_TARGET = 14.7;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = Bimwe.Aural;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx xros xrsimulator";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2,7";
XROS_DEPLOYMENT_TARGET = 2.2;
};
name = Debug;
};
7D89225E2F6800BF001184E1 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_ENTITLEMENTS = Aural/Aural.entitlements;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_ASSET_PATHS = "\"Aural/Preview Content\"";
DEVELOPMENT_TEAM = 3882NS6655;
ENABLE_HARDENED_RUNTIME = YES;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]" = YES;
"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]" = YES;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]" = UIStatusBarStyleDefault;
"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]" = UIStatusBarStyleDefault;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 18.2;
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
MACOSX_DEPLOYMENT_TARGET = 14.7;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = Bimwe.Aural;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx xros xrsimulator";
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2,7";
XROS_DEPLOYMENT_TARGET = 2.2;
};
name = Release;
};
7D8922602F6800BF001184E1 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 3882NS6655;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.2;
MACOSX_DEPLOYMENT_TARGET = 14.7;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = Bimwe.AuralTests;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx xros xrsimulator";
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2,7";
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Aural.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Aural";
XROS_DEPLOYMENT_TARGET = 2.2;
};
name = Debug;
};
7D8922612F6800BF001184E1 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 3882NS6655;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.2;
MACOSX_DEPLOYMENT_TARGET = 14.7;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = Bimwe.AuralTests;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx xros xrsimulator";
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2,7";
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Aural.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Aural";
XROS_DEPLOYMENT_TARGET = 2.2;
};
name = Release;
};
7D8922632F6800BF001184E1 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 3882NS6655;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.2;
MACOSX_DEPLOYMENT_TARGET = 14.7;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = Bimwe.AuralUITests;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx xros xrsimulator";
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2,7";
TEST_TARGET_NAME = Aural;
XROS_DEPLOYMENT_TARGET = 2.2;
};
name = Debug;
};
7D8922642F6800BF001184E1 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 3882NS6655;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.2;
MACOSX_DEPLOYMENT_TARGET = 14.7;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = Bimwe.AuralUITests;
PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx xros xrsimulator";
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2,7";
TEST_TARGET_NAME = Aural;
XROS_DEPLOYMENT_TARGET = 2.2;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
7D8922322F6800BB001184E1 /* Build configuration list for PBXProject "Aural" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7D89225A2F6800BF001184E1 /* Debug */,
7D89225B2F6800BF001184E1 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
7D89225C2F6800BF001184E1 /* Build configuration list for PBXNativeTarget "Aural" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7D89225D2F6800BF001184E1 /* Debug */,
7D89225E2F6800BF001184E1 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
7D89225F2F6800BF001184E1 /* Build configuration list for PBXNativeTarget "AuralTests" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7D8922602F6800BF001184E1 /* Debug */,
7D8922612F6800BF001184E1 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
7D8922622F6800BF001184E1 /* Build configuration list for PBXNativeTarget "AuralUITests" */ = {
isa = XCConfigurationList;
buildConfigurations = (
7D8922632F6800BF001184E1 /* Debug */,
7D8922642F6800BF001184E1 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 7D89222F2F6800BB001184E1 /* Project object */;
}

View File

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>

View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>SchemeUserState</key>
<dict>
<key>Aural.xcscheme_^#shared#^_</key>
<dict>
<key>orderHint</key>
<integer>0</integer>
</dict>
</dict>
</dict>
</plist>

View File

@@ -0,0 +1,11 @@
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,85 @@
{
"images" : [
{
"idiom" : "universal",
"platform" : "ios",
"size" : "1024x1024"
},
{
"appearances" : [
{
"appearance" : "luminosity",
"value" : "dark"
}
],
"idiom" : "universal",
"platform" : "ios",
"size" : "1024x1024"
},
{
"appearances" : [
{
"appearance" : "luminosity",
"value" : "tinted"
}
],
"idiom" : "universal",
"platform" : "ios",
"size" : "1024x1024"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "16x16"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "16x16"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "32x32"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "32x32"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "128x128"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "128x128"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "256x256"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "256x256"
},
{
"idiom" : "mac",
"scale" : "1x",
"size" : "512x512"
},
{
"idiom" : "mac",
"scale" : "2x",
"size" : "512x512"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,129 @@
import Foundation
import AVFAudio
import Combine
/// Observes the shared `AVAudioSession` and publishes the current audio
/// input route, flagging when a Bluetooth device (e.g. AirPods) is the
/// active microphone.
@MainActor
final class AudioRouteManager: ObservableObject {
    static let shared = AudioRouteManager()

    /// High-level classification of the current input route.
    enum RouteStatus {
        case unknown
        case bluetoothActive(deviceName: String?)
        case notAvailable
    }

    /// Route classification, updated on every observed route change.
    @Published private(set) var routeStatus: RouteStatus = .unknown
    /// True when the active input port is a Bluetooth port.
    @Published private(set) var isBluetoothPreferredActive: Bool = false
    /// Port name of the first input of the current route, if any.
    @Published private(set) var currentInputName: String?

    private let session = AVAudioSession.sharedInstance()
    private var notificationTokens: [NSObjectProtocol] = []

    private init() {
        configureSession()
        startObservingRouteChanges()
        refreshCurrentRoute()
    }

    deinit {
        // FIX: block-based observers registered via addObserver(forName:...)
        // are NOT removed automatically; the original dropped the tokens
        // without ever calling removeObserver, leaking the observations.
        for token in notificationTokens {
            NotificationCenter.default.removeObserver(token)
        }
    }

    // MARK: - Public API

    /// Activates the audio session and re-reads the current route.
    func activateSession() {
        do {
            try session.setActive(true, options: [])
            refreshCurrentRoute()
        } catch {
            print("AudioRouteManager activateSession error: \(error)")
        }
    }

    /// Deactivates the session, notifying other apps they may resume audio.
    func deactivateSession() {
        do {
            try session.setActive(false, options: [.notifyOthersOnDeactivation])
        } catch {
            print("AudioRouteManager deactivateSession error: \(error)")
        }
    }

    /// Recomputes the published state from the session's current route.
    func refreshCurrentRoute() {
        updateRouteState(for: session.currentRoute)
    }

    // MARK: - Private

    /// Configures .playAndRecord + .spokenAudio with Bluetooth HFP and A2DP
    /// enabled, so headset microphones are eligible inputs.
    private func configureSession() {
        do {
            try session.setCategory(
                .playAndRecord,
                mode: .spokenAudio,
                options: [
                    .allowBluetooth,
                    .allowBluetoothA2DP
                ]
            )
        } catch {
            print("AudioRouteManager configureSession error: \(error)")
        }
    }

    private func startObservingRouteChanges() {
        let center = NotificationCenter.default
        let token = center.addObserver(
            forName: AVAudioSession.routeChangeNotification,
            object: session,
            queue: .main
        ) { [weak self] notification in
            self?.handleRouteChange(notification: notification)
        }
        notificationTokens.append(token)
    }

    private func handleRouteChange(notification: Notification) {
        // If the reason cannot be decoded, fall back to an unconditional refresh.
        guard let userInfo = notification.userInfo,
              let reasonRaw = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
              let reason = AVAudioSession.RouteChangeReason(rawValue: reasonRaw) else {
            refreshCurrentRoute()
            return
        }
        switch reason {
        case .newDeviceAvailable, .oldDeviceUnavailable, .routeConfigurationChange:
            refreshCurrentRoute()
        default:
            break
        }
    }

    private func updateRouteState(for route: AVAudioSessionRouteDescription) {
        let activeInput = route.inputs.first
        currentInputName = activeInput?.portName
        if let portType = activeInput?.portType, isBluetoothPort(portType) {
            isBluetoothPreferredActive = true
            routeStatus = .bluetoothActive(deviceName: activeInput?.portName)
        } else {
            isBluetoothPreferredActive = false
            // FIX: original read `inputs.isEmpty ? .notAvailable : .notAvailable`
            // — both ternary branches were identical, so the condition was dead.
            // Collapsed to the single value; observable behavior is unchanged.
            routeStatus = .notAvailable
        }
    }

    /// Whether the given port type is one of the Bluetooth port types.
    private func isBluetoothPort(_ portType: AVAudioSession.Port) -> Bool {
        switch portType {
        case .bluetoothHFP, .bluetoothA2DP, .bluetoothLE:
            return true
        default:
            return false
        }
    }
}

10
Aural/Aural.entitlements Normal file
View File

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.app-sandbox</key>
<true/>
<key>com.apple.security.files.user-selected.read-only</key>
<true/>
</dict>
</plist>

29
Aural/AuralApp.swift Normal file
View File

@@ -0,0 +1,29 @@
//
// AuralApp.swift
// Aural
//
// Created on 2026/3/16.
//
import SwiftUI
import SwiftData
@main
struct AuralApp: App {
    /// Shared SwiftData container backing the transcript store
    /// (sessions and their segments).
    var sharedModelContainer: ModelContainer = {
        let schema = Schema([
            TranscriptSession.self,
            TranscriptSegment.self
        ])
        let configuration = ModelConfiguration()
        do {
            return try ModelContainer(for: schema, configurations: [configuration])
        } catch {
            // FIX: was `try!`, which crashes with no diagnostic. Failure to
            // create the store is still unrecoverable at launch, but the
            // underlying error is now included in the crash message.
            fatalError("Could not create ModelContainer: \(error)")
        }
    }()

    var body: some Scene {
        WindowGroup {
            ContentView()
        }
        .modelContainer(sharedModelContainer)
    }
}

179
Aural/ContentView.swift Normal file
View File

@@ -0,0 +1,179 @@
//
// ContentView.swift
// Aural
//
// Created on 2026/3/16.
//
import SwiftUI
import SwiftData
/// Main screen: status bar, record/stop button, and a scrolling transcript.
/// Recording is gated on a Bluetooth input being active (see `AudioRouteManager`).
struct ContentView: View {
@Environment(\.modelContext) private var modelContext
// Sessions newest-first; only the first (latest) is rendered below.
@Query(sort: \TranscriptSession.startTime, order: .reverse)
private var sessions: [TranscriptSession]
@StateObject private var routeManager = AudioRouteManager.shared
@StateObject private var speechManager = SpeechRecognitionManager.shared
// Local UI recording state; toggled with animation after start/stop.
@State private var isRecording: Bool = false
var body: some View {
ZStack {
Color(.systemBackground)
.ignoresSafeArea()
VStack(spacing: 32) {
statusBar
Spacer()
recordingButton
scrollTranscriptView
.frame(maxHeight: .infinity, alignment: .top)
Spacer(minLength: 32)
}
.padding(.horizontal, 32)
}
}
/// Circular mic/stop button. Disabled unless a Bluetooth route is active;
/// tap toggles recording, long-press (0.7s) force-stops.
private var recordingButton: some View {
let isBluetoothOK: Bool
switch routeManager.routeStatus {
case .bluetoothActive:
isBluetoothOK = true
default:
isBluetoothOK = false
}
// NOTE(review): && binds tighter than ||, so this evaluates as
// !isBluetoothOK || (isRecognizing && !isRecording) — i.e. also disabled
// while a recognition is winding down that this view didn't start.
// Looks intended, but confirm; add parentheses if so.
let disabled = !isBluetoothOK || speechManager.isRecognizing && !isRecording
return Button {
Task {
if !isRecording {
// Start
let granted = await speechManager.requestAuthorization()
guard granted else { return }
routeManager.activateSession()
let deviceName = routeManager.currentInputName ?? "Unknown"
speechManager.attach(modelContext: modelContext)
speechManager.startSession(deviceName: deviceName)
} else {
// Stop
speechManager.stopSession()
routeManager.deactivateSession()
}
withAnimation(.spring(response: 0.25, dampingFraction: 0.8)) {
isRecording.toggle()
}
}
} label: {
ZStack {
Circle()
.fill(disabled ? Color.gray.opacity(0.4) : (isRecording ? Color.red : Color.primary))
.frame(width: 96, height: 96)
Circle()
.strokeBorder(Color.primary.opacity(0.1), lineWidth: 4)
.frame(width: 120, height: 120)
Image(systemName: isRecording ? "stop.fill" : "mic.fill")
.font(.system(size: 28, weight: .bold))
.foregroundStyle(Color.white)
}
}
.buttonStyle(.plain)
.disabled(disabled)
// Long-press acts only as an additional "stop" path; starting is tap-only.
.gesture(
LongPressGesture(minimumDuration: 0.7)
.onEnded { _ in
if isRecording {
speechManager.stopSession()
routeManager.deactivateSession()
withAnimation(.spring(response: 0.25, dampingFraction: 0.8)) {
isRecording = false
}
}
}
)
.accessibilityLabel("Recording")
}
/// Live partial text (if any) followed by the committed segments of the
/// most recent session.
private var scrollTranscriptView: some View {
ScrollView {
VStack(alignment: .leading, spacing: 12) {
if !speechManager.liveText.isEmpty {
Text(speechManager.liveText)
.font(.body)
.foregroundStyle(.primary)
.padding(.vertical, 4)
}
if let latest = sessions.first {
ForEach(latest.segments, id: \.id) { segment in
Text(segment.text)
.font(.callout)
.foregroundStyle(.secondary)
}
}
}
.frame(maxWidth: .infinity, alignment: .leading)
.padding(.vertical, 8)
}
}
/// Status row: colored dot plus either the last error or the route name.
private var statusBar: some View {
HStack {
Circle()
.fill(statusColor)
.frame(width: 8, height: 8)
if let error = speechManager.lastErrorDescription {
Image(systemName: "exclamationmark.triangle.fill")
.foregroundStyle(.yellow)
Text(error)
.font(.caption)
} else {
switch routeManager.routeStatus {
case .bluetoothActive(let name):
Text(name ?? "Bluetooth")
.font(.caption)
case .notAvailable:
Text("No Mic")
.font(.caption)
default:
EmptyView()
}
}
Spacer()
}
.padding(.horizontal, 32)
.padding(.top, 12)
}
/// Dot color: red = error, green = Bluetooth active, gray = no mic,
/// orange = route still unknown.
private var statusColor: Color {
if speechManager.lastErrorDescription != nil {
return .red
}
switch routeManager.routeStatus {
case .bluetoothActive:
return .green
case .notAvailable:
return .gray
default:
return .orange
}
}
}
#Preview {
ContentView()
.modelContainer(for: [TranscriptSession.self, TranscriptSegment.self], inMemory: true)
}

View File

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,252 @@
import Foundation
import AVFAudio
import Speech
import SwiftData
///
/// - 使 SFSpeechRecognizer + AVAudioEngine
/// - partial UI
/// - final / SwiftData TranscriptSession
@MainActor
final class SpeechRecognitionManager: ObservableObject {
static let shared = SpeechRecognitionManager()
// MARK: -
/// partial
@Published private(set) var liveText: String = ""
///
@Published private(set) var isRecognizing: Bool = false
///
@Published private(set) var lastErrorDescription: String?
// MARK: -
private let audioEngine = AVAudioEngine()
private var speechRecognizer: SFSpeechRecognizer?
private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
private var recognitionTask: SFSpeechRecognitionTask?
///
private var lastResultDate: Date?
private var currentUtteranceBuffer: String = ""
/// SwiftData
var modelContext: ModelContext?
private var currentSession: TranscriptSession?
private init() {
configureRecognizer()
}
// MARK: -
private func configureRecognizer(locale: Locale = Locale(identifier: Locale.current.identifier)) {
// 使 zh-CN
if Locale.current.language.languageCode?.identifier == "zh" {
speechRecognizer = SFSpeechRecognizer(locale: Locale(identifier: "zh-CN"))
} else {
speechRecognizer = SFSpeechRecognizer(locale: locale)
}
}
func attach(modelContext: ModelContext) {
self.modelContext = modelContext
}
// MARK: -
func requestAuthorization() async -> Bool {
let speechGranted = await withCheckedContinuation { (cont: CheckedContinuation<Bool, Never>) in
SFSpeechRecognizer.requestAuthorization { status in
cont.resume(returning: status == .authorized)
}
}
let micGranted: Bool = await withCheckedContinuation { (cont: CheckedContinuation<Bool, Never>) in
AVAudioApplication.requestRecordPermission { granted in
cont.resume(returning: granted)
}
}
if !speechGranted || !micGranted {
lastErrorDescription = "权限未授予"
} else {
lastErrorDescription = nil
}
return speechGranted && micGranted
}
// MARK: - Session
func startSession(deviceName: String) {
guard !isRecognizing else { return }
guard let recognizer = speechRecognizer, recognizer.isAvailable else {
lastErrorDescription = "语音识别不可用"
return
}
guard let modelContext else {
lastErrorDescription = "数据上下文不可用"
return
}
resetInternalState()
let session = TranscriptSession(
startTime: Date(),
deviceConnected: deviceName,
segments: []
)
modelContext.insert(session)
currentSession = session
setupRecognitionPipeline()
}
func stopSession(commit: Bool = true) {
guard isRecognizing else { return }
audioEngine.stop()
audioEngine.inputNode.removeTap(onBus: 0)
recognitionRequest?.endAudio()
recognitionTask?.cancel()
isRecognizing = false
if commit {
finalizeCurrentUtteranceIfNeeded()
if let currentSession {
currentSession.endTime = Date()
}
} else {
// Session
}
}
// MARK: -
/// Builds and starts the full recognition pipeline:
/// 1. configure + activate the shared audio session for recording,
/// 2. create a streaming recognition request with partial results enabled,
/// 3. tap the input node and feed audio buffers into the request,
/// 4. start the engine, then launch the recognition task.
/// Order matters: the tap must be installed before the engine starts.
/// On any failure, sets `lastErrorDescription` and aborts.
private func setupRecognitionPipeline() {
    let audioSession = AVAudioSession.sharedInstance()
    do {
        // .measurement minimizes system signal processing; the Bluetooth
        // options allow input from wireless headsets such as AirPods.
        try audioSession.setCategory(.record, mode: .measurement, options: [.allowBluetooth, .allowBluetoothA2DP])
        try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
    } catch {
        lastErrorDescription = "音频会话配置失败"
        return
    }
    recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
    guard let recognitionRequest else {
        lastErrorDescription = "创建请求失败"
        return
    }
    // Partial results drive the live transcript shown in the UI.
    recognitionRequest.shouldReportPartialResults = true
    let inputNode = audioEngine.inputNode
    let recordingFormat = inputNode.outputFormat(forBus: 0)
    inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { [weak self] buffer, _ in
        guard let self, let recognitionRequest else { return }
        recognitionRequest.append(buffer)
    }
    audioEngine.prepare()
    do {
        try audioEngine.start()
        isRecognizing = true
        lastErrorDescription = nil
    } catch {
        lastErrorDescription = "音频引擎启动失败"
        return
    }
    guard let recognizer = speechRecognizer else {
        lastErrorDescription = "识别器未初始化"
        return
    }
    recognitionTask = recognizer.recognitionTask(with: recognitionRequest) { [weak self] result, error in
        guard let self else { return }
        if let result = result {
            handleRecognitionResult(result)
        }
        if error != nil || (result?.isFinal ?? false) {
            // Flush any buffered partial text when the stream ends or errors out.
            finalizeCurrentUtteranceIfNeeded()
            self.isRecognizing = false
        }
    }
}
// MARK: - &
/// Routes a recognition callback: partial hypotheses only refresh the live
/// UI text and the utterance buffer; a final hypothesis is persisted as a
/// segment and the live state is reset.
private func handleRecognitionResult(_ result: SFSpeechRecognitionResult) {
    let transcript = result.bestTranscription.formattedString
    liveText = transcript
    lastResultDate = Date()
    guard result.isFinal else {
        // Partial result: stage it until finalized or a silence gap elapses.
        currentUtteranceBuffer = transcript
        return
    }
    // Final result: persist as a TranscriptSegment and clear live state.
    appendSegment(with: transcript)
    liveText = ""
    currentUtteranceBuffer = ""
}
/// 2
/// Treats a gap of more than 2 seconds since the last recognition callback
/// as the end of an utterance and flushes the buffered text into a segment.
/// No-op before the first result arrives.
func checkForSilenceAndFinalizeIfNeeded() {
    guard let lastActivity = lastResultDate,
          Date().timeIntervalSince(lastActivity) > 2.0 else { return }
    finalizeCurrentUtteranceIfNeeded()
}
/// Flushes the buffered partial utterance (whitespace-trimmed) into a new
/// segment, then clears the buffer and the live UI text. No-op when the
/// buffer holds nothing but whitespace.
private func finalizeCurrentUtteranceIfNeeded() {
    let trimmed = currentUtteranceBuffer.trimmingCharacters(in: .whitespacesAndNewlines)
    if trimmed.isEmpty { return }
    appendSegment(with: trimmed)
    currentUtteranceBuffer = ""
    liveText = ""
}
/// Appends `text` as a timestamped segment to the current session.
/// Silently drops the text when no session is active.
private func appendSegment(with text: String) {
    guard let session = currentSession else { return }
    session.segments.append(TranscriptSegment(timestamp: Date(), text: text))
}
// MARK: -
/// Clears all transient recognition state — live text, buffers, timestamps,
/// the pending request/task — and tears down the engine tap if it is running.
/// Called before each new session.
private func resetInternalState() {
    liveText = ""
    currentUtteranceBuffer = ""
    lastErrorDescription = nil
    lastResultDate = nil
    recognitionTask?.cancel()
    recognitionTask = nil
    recognitionRequest = nil
    guard audioEngine.isRunning else { return }
    audioEngine.stop()
    audioEngine.inputNode.removeTap(onBus: 0)
}
}

View File

@@ -0,0 +1,40 @@
import Foundation
import SwiftData
/// A single recognized utterance: the text and the moment it was finalized.
/// Persisted with SwiftData as the leaf unit of a `TranscriptSession`, so
/// transcripts stay sliceable and time-alignable.
@Model
final class TranscriptSegment {
    var id: UUID
    var timestamp: Date
    var text: String
    /// - Parameters:
    ///   - id: stable identity; defaults to a fresh UUID.
    ///   - timestamp: when the utterance was captured; defaults to now.
    ///   - text: the recognized text.
    init(id: UUID = UUID(), timestamp: Date = Date(), text: String) {
        self.id = id
        self.timestamp = timestamp
        self.text = text
    }
}
/// One recording/transcription session: its time span, the audio device in
/// use, and the ordered utterance segments captured during it.
@Model
final class TranscriptSession {
    var id: UUID
    var startTime: Date
    /// `nil` while the session is still running.
    var endTime: Date?
    /// Name of the audio peripheral (e.g. AirPods) active for this session.
    var deviceConnected: String
    // Cascade delete so removing a session also removes its segments;
    // without an explicit rule, deleted sessions would leave orphaned
    // TranscriptSegment rows in the store.
    @Relationship(deleteRule: .cascade)
    var segments: [TranscriptSegment]
    /// - Parameters:
    ///   - id: stable identity; defaults to a fresh UUID.
    ///   - startTime: session start; defaults to now.
    ///   - endTime: session end; `nil` until stopped.
    ///   - deviceConnected: active audio device name.
    ///   - segments: initial segments; defaults to empty.
    init(
        id: UUID = UUID(),
        startTime: Date = Date(),
        endTime: Date? = nil,
        deviceConnected: String,
        segments: [TranscriptSegment] = []
    ) {
        self.id = id
        self.startTime = startTime
        self.endTime = endTime
        self.deviceConnected = deviceConnected
        self.segments = segments
    }
}

View File

@@ -0,0 +1,16 @@
//
// AuralTests.swift
// AuralTests
//
// Created by on 2026/3/16.
//
import Testing
/// Placeholder unit-test suite generated by the Xcode template (Swift Testing).
struct AuralTests {
    @Test func example() async throws {
        // Write your test here and use APIs like `#expect(...)` to check expected conditions.
    }
}

View File

@@ -0,0 +1,43 @@
//
// AuralUITests.swift
// AuralUITests
//
// Created by on 2026/3/16.
//
import XCTest
/// Template UI-test suite: launches the app and measures launch performance.
final class AuralUITests: XCTestCase {
    override func setUpWithError() throws {
        // Put setup code here. This method is called before the invocation of each test method in the class.
        // In UI tests it is usually best to stop immediately when a failure occurs.
        continueAfterFailure = false
        // In UI tests it's important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
    }
    override func tearDownWithError() throws {
        // Put teardown code here. This method is called after the invocation of each test method in the class.
    }
    /// Smoke test: the app launches without crashing.
    @MainActor
    func testExample() throws {
        // UI tests must launch the application that they test.
        let app = XCUIApplication()
        app.launch()
        // Use XCTAssert and related functions to verify your tests produce the correct results.
    }
    /// Records a launch-time baseline via XCTApplicationLaunchMetric.
    @MainActor
    func testLaunchPerformance() throws {
        if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) {
            // This measures how long it takes to launch your application.
            measure(metrics: [XCTApplicationLaunchMetric()]) {
                XCUIApplication().launch()
            }
        }
    }
}

View File

@@ -0,0 +1,33 @@
//
// AuralUITestsLaunchTests.swift
// AuralUITests
//
// Created by on 2026/3/16.
//
import XCTest
/// Template launch test: runs once per UI configuration (e.g. light/dark)
/// and attaches a launch-screen screenshot to the test report.
final class AuralUITestsLaunchTests: XCTestCase {
    // Re-run this test for every target application UI configuration.
    override class var runsForEachTargetApplicationUIConfiguration: Bool {
        true
    }
    override func setUpWithError() throws {
        continueAfterFailure = false
    }
    @MainActor
    func testLaunch() throws {
        let app = XCUIApplication()
        app.launch()
        // Insert steps here to perform after app launch but before taking a screenshot,
        // such as logging into a test account or navigating somewhere in the app
        let attachment = XCTAttachment(screenshot: app.screenshot())
        attachment.name = "Launch Screen"
        attachment.lifetime = .keepAlways
        add(attachment)
    }
}

View File

@@ -0,0 +1,68 @@
第一步:项目初始化与全局数据流设计 (发给 Agent 的 Prompt 1)
Role & Context: 你现在是一个资深的 iOS 架构师。我们需要开发一个名为 Airtep Voice 的 iOS POC 应用。这个应用的核心功能是:连接 AirPods实时采集会议或谈话的语音并利用苹果原生框架进行流式转写最后将文本结构化落盘。
任务 1项目初始化与 UI 规范
请创建一个基于 SwiftUI 和 SwiftData 的项目。
在 Info.plist 中添加必要的权限描述NSMicrophoneUsageDescription (用于环境录音) 和 NSSpeechRecognitionUsageDescription (用于实时语音转文字)。
UI 设计规范(极高优先级):在视觉呈现上,请严格遵循极简主义和对称美学,深度参考 Apple 的原生设计原则。所有的图标和界面元素必须采用纯色方案,绝对不要使用渐变色。界面需尽量去除多余的辅助性文字,依靠几何对称和留白来引导用户操作。主界面只需要一个居中的、对称的录音控制按钮(纯色圆形或圆角矩形),以及下方一个平滑展开的滚动文本视图。
任务 2数据架构设计
为了让产生的数据在未来具备良好的“可观测、可对齐、可索引”特性,请使用 SwiftData 定义一个 TranscriptSession 模型。
包含字段:
id: UUID
startTime: Date
endTime: Date?
deviceConnected: String (记录当前使用的音频外设名称)
segments: [TranscriptSegment] (嵌套模型,记录每一句话的 timestamp 和 text便于后续切片和对齐)
请输出初始化的工程结构建议、SwiftData 模型代码以及极简风格的主视图结构。
第二步:核心音频路由与蓝牙设备强制接管 (发给 Agent 的 Prompt 2)
任务 3开发 AVAudioSession 路由管理器
现在我们需要编写底层的音频控制模块 AudioRouteManager.swift。
配置 AVAudioSession设置 category 为 .playAndRecordmode 为 .measurement 或 .spokenAudio。
关键选项:必须包含 .allowBluetooth 和 .allowBluetoothA2DP确保系统能强制从用户的蓝牙耳机AirPods获取音频输入而不是默认的手机麦克风。
编写一个状态监听器,当系统音频路由发生变化(例如用户摘下 AirPods)时,能够实时抛出状态变更;UI 需根据这个状态将纯色录音按钮置灰或改变形状。
请提供这个单例管理类的完整 Swift 代码。
第三步:实时语音流式转文字引擎 (发给 Agent 的 Prompt 3)
任务 4开发 SFSpeechRecognizer 流式处理引擎
我们需要实现 SpeechRecognitionManager.swift。
实例化 SFSpeechRecognizer,并确保 locale 设置为当前设备语言(默认中文 zh-CN)。
使用 AVAudioEngine 捕获刚才配置好的音频输入节点的 buffer。
创建 SFSpeechAudioBufferRecognitionRequest将 shouldReportPartialResults 设为 true。
在 installTap 的回调中,将音频流实时喂给识别请求。
结构化处理:当转写回调触发时,不要只拼接长文本。如果是中间结果(partial),只更新当前 UI;如果是最终结果(或者根据静音停顿超过 2 秒作为分句逻辑),将这段话封装为一个 TranscriptSegment 对象,对齐时间戳,并追加到 SwiftData 的当前 Session 中,保证所有数据都是结构化且可索引的。
请输出包含上述逻辑的完整类实现,并确保它是一个 ObservableObject以便 SwiftUI 视图实时绑定数据。
第四步:界面组装与交互联动 (发给 Agent 的 Prompt 4)
任务 5组装 POC 完整流程
结合前面写好的 AudioRouteManager 和 SpeechRecognitionManager以及极简的 SwiftUI 视图,把它们串联起来。
当用户点击屏幕正中央的纯色对称按钮时,请求权限 -> 检查 AirPods 连接状态 -> 启动 AudioEngine -> 开始流式转写。
在按钮下方,实时渲染当前正在说的话。
提供一个手势(如向下轻扫或长按按钮)来停止 Session并将这一整段结构化数据正式持久化到本地。
处理可能出现的异常边界(如未授权、未连接麦克风),用极简的图标震动或纯色状态条进行提示,避免长篇大论的弹窗文字。
请输出整合后的 ContentView.swift 和应用入口文件,确保这份代码丢进 Xcode 就能直接编译运行。