react-native-audiosprites 0.2.0-alpha.0
- package/LICENSE +20 -0
- package/README.md +121 -0
- package/lib/module/index.js +96 -0
- package/lib/module/index.js.map +1 -0
- package/lib/module/package.json +1 -0
- package/lib/typescript/package.json +1 -0
- package/lib/typescript/src/index.d.ts +19 -0
- package/lib/typescript/src/index.d.ts.map +1 -0
- package/package.json +156 -0
- package/src/index.tsx +110 -0
package/LICENSE
ADDED
@@ -0,0 +1,20 @@
MIT License

Copyright (c) 2025 qalqi
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,121 @@
# react-native-audiosprites

A universal player for audio sprites generated by the 'audiosprite' tool.

## Installation

```sh
npm install react-native-audiosprites
```

## Usage

First, you need to generate an audio sprite and a JSON manifest file using the `audiosprite` tool.

Assuming you have `audiosprite` installed globally:

```sh
audiosprite --output audiosprite --format howler --path ./src/__tests__/ Sound_1.m4a Sound_2.m4a Sound_3.m4a Sound_4.m4a
```

This command will generate `audiosprite.json`, `audiosprite.mp3`, `audiosprite.ogg`, `audiosprite.m4a`, and `audiosprite.ac3` in the `src/__tests__/` directory. The `--path` argument is important as it tells the player where to find the audio files relative to the JSON manifest.

Then, you can use the `AudioSpritePlayer` to play the sounds from the sprite.

### Browser Environment

```typescript
import { AudioSpritePlayer } from 'react-native-audiosprites';

// Ensure AudioContext is available
const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();

// Use the native fetch API
const player = new AudioSpritePlayer({
  audioContext,
  fetch: window.fetch.bind(window),
});

async function playSound(soundName: string) {
  try {
    // Load the audio sprite manifest and audio files
    // Adjust the path to your audiosprite.json file
    await player.load('./src/__tests__/audiosprite.json');
    console.log('Audio sprite loaded successfully.');

    // Play a sound from the spritemap
    player.play(soundName);
    console.log(`Playing sound: ${soundName}`);
  } catch (error) {
    console.error('Error playing sound:', error);
  }
}

// Example usage:
playSound('Sound_1');
// playSound('Sound_2');
```

### React Native Environment

For React Native, you'll typically need a polyfill or a library that provides `AudioContext` and `fetch` functionality, as these are Web APIs. `react-native-audio-api` is a good option for `AudioContext`. For `fetch`, React Native provides its own global `fetch` implementation.

First, install `react-native-audio-api`:

```sh
npm install react-native-audio-api
# or
yarn add react-native-audio-api
```

Then, you can use it like this:

```typescript
import { AudioSpritePlayer } from 'react-native-audiosprites';
import { AudioContext } from 'react-native-audio-api'; // Import from the library

// Create an instance of AudioContext
const audioContext = new AudioContext();

// Use the global fetch provided by React Native
const player = new AudioSpritePlayer({
  audioContext,
  fetch: fetch, // React Native provides a global fetch
});

async function playRNSound(soundName: string) {
  try {
    // Load the audio sprite manifest and audio files
    // Adjust the path to your audiosprite.json file.
    // In React Native, you might need to bundle your audio files
    // and refer to them using a local asset path or a remote URL.
    // For this example, we assume it's accessible via a URL.
    await player.load('http://localhost:8081/src/__tests__/audiosprite.json');
    console.log('React Native Audio sprite loaded successfully.');

    // Play a sound from the spritemap
    player.play(soundName);
    console.log(`Playing React Native sound: ${soundName}`);
  } catch (error) {
    console.error('Error playing React Native sound:', error);
  }
}

// Example usage:
playRNSound('Sound_1');
// playRNSound('Sound_2');
```

## Contributing

- [Development workflow](CONTRIBUTING.md#development-workflow)
- [Sending a pull request](CONTRIBUTING.md#sending-a-pull-request)
- [Code of conduct](CODE_OF_CONDUCT.md)

## License

MIT

---

Made with [create-react-native-library](https://github.com/callstack/react-native-builder-bob)
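The `load()` calls in the README above point at the generated `audiosprite.json`. The player implementation in `src/index.tsx` (later in this diff) only reads the manifest's `resources` array and its `spritemap` of `start`/`end` offsets in seconds, so a compatible manifest looks roughly like this illustrative sketch (file names and timings are placeholders, not values shipped with the package):

```json
{
  "resources": ["audiosprite.mp3", "audiosprite.ogg", "audiosprite.m4a", "audiosprite.ac3"],
  "spritemap": {
    "Sound_1": { "start": 0, "end": 1.25, "loop": false },
    "Sound_2": { "start": 2, "end": 3.5, "loop": false }
  }
}
```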
package/lib/module/index.js
ADDED
@@ -0,0 +1,96 @@
"use strict";

/**
 * Universal player for audio sprites generated by the 'audiosprite' tool.
 * It requires an AudioContext and fetch to be injected.
 */
export class AudioSpritePlayer {
  constructor({
    audioContext,
    fetch
  }) {
    if (!audioContext) {
      throw new Error('An AudioContext instance must be provided.');
    }
    if (!fetch) {
      throw new Error('A fetch implementation must be provided.');
    }
    this.audioContext = audioContext;
    this.fetch = fetch;
    this.audioBuffer = null;
    this.manifest = null;
  }
  async load(jsonPath) {
    try {
      const response = await this.fetch(jsonPath);
      if (!response.ok) {
        throw new Error(`Failed to fetch manifest: ${response.statusText}`);
      }
      this.manifest = await response.json();
      if (!this.manifest.resources || !this.manifest.spritemap) {
        throw new Error('Invalid audiosprite manifest format. Missing "resources" or "spritemap".');
      }

      // Find the first supported audio file (e.g., .mp3 or .ogg)
      // For simplicity, we just take the first one.
      const audioFileName = this.manifest.resources[0];
      const audioUrl = new URL(audioFileName, response.url).href;
      const audioResponse = await this.fetch(audioUrl);
      if (!audioResponse.ok) {
        throw new Error(`Failed to fetch audio file: ${audioResponse.statusText}`);
      }
      const arrayBuffer = await audioResponse.arrayBuffer();
      this.audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
      console.log('Audio sprite loaded successfully.');
    } catch (error) {
      console.error('Failed to load audio sprite:', error);
      throw error; // Re-throw for user to catch
    }
  }
  play(soundName) {
    if (!this.audioBuffer || !this.manifest) {
      console.warn('Audio sprite not loaded. Call load() first.');
      return;
    }

    // Resume context if it was suspended (e.g., by browser policy)
    if (this.audioContext.state === 'suspended') {
      this.audioContext.resume();
    }

    // Get the sound from the 'spritemap'
    const sound = this.manifest.spritemap[soundName];
    if (!sound) {
      console.warn(`Sound "${soundName}" not found in spritemap.`);
      return;
    }

    // Calculate duration from start/end times
    const duration = sound.end - sound.start;
    if (duration <= 0) {
      console.warn(`Sound "${soundName}" has invalid duration.`);
      return;
    }
    const source = this.audioContext.createBufferSource();
    source.buffer = this.audioBuffer;
    const gain = this.audioContext.createGain();
    gain.gain.setValueAtTime(1.0, this.audioContext.currentTime);
    source.connect(gain);
    gain.connect(this.audioContext.destination);

    // Use the 'audiosprite' format: start(when, offset, duration)
    source.start(0,
    // Start playing now
    sound.start,
    // The offset
    duration // The calculated duration
    );
  }
  getManifest() {
    return this.manifest;
  }
  getAudioBuffer() {
    return this.audioBuffer;
  }
}
//# sourceMappingURL=index.js.map
package/lib/module/index.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"names":["AudioSpritePlayer","constructor","audioContext","fetch","Error","audioBuffer","manifest","load","jsonPath","response","ok","statusText","json","resources","spritemap","audioFileName","audioUrl","URL","url","href","audioResponse","arrayBuffer","decodeAudioData","console","log","error","play","soundName","warn","state","resume","sound","duration","end","start","source","createBufferSource","buffer","gain","createGain","setValueAtTime","currentTime","connect","destination","getManifest","getAudioBuffer"],"sourceRoot":"../../src","sources":["index.tsx"],"mappings":";;AAAA;AACA;AACA;AACA;AACA,OAAO,MAAMA,iBAAiB,CAAC;EAM7BC,WAAWA,CAAC;IAAEC,YAAY;IAAEC;EAAyC,CAAC,EAAE;IACtE,IAAI,CAACD,YAAY,EAAE;MACjB,MAAM,IAAIE,KAAK,CAAC,4CAA4C,CAAC;IAC/D;IACA,IAAI,CAACD,KAAK,EAAE;MACV,MAAM,IAAIC,KAAK,CAAC,0CAA0C,CAAC;IAC7D;IACA,IAAI,CAACF,YAAY,GAAGA,YAAY;IAChC,IAAI,CAACC,KAAK,GAAGA,KAAK;IAClB,IAAI,CAACE,WAAW,GAAG,IAAI;IACvB,IAAI,CAACC,QAAQ,GAAG,IAAI;EACtB;EAEA,MAAMC,IAAIA,CAACC,QAAgB,EAAE;IAC3B,IAAI;MACF,MAAMC,QAAQ,GAAG,MAAM,IAAI,CAACN,KAAK,CAACK,QAAQ,CAAC;MAC3C,IAAI,CAACC,QAAQ,CAACC,EAAE,EAAE;QAChB,MAAM,IAAIN,KAAK,CAAC,6BAA6BK,QAAQ,CAACE,UAAU,EAAE,CAAC;MACrE;MACA,IAAI,CAACL,QAAQ,GAAG,MAAMG,QAAQ,CAACG,IAAI,CAAC,CAAC;MAErC,IAAI,CAAC,IAAI,CAACN,QAAQ,CAACO,SAAS,IAAI,CAAC,IAAI,CAACP,QAAQ,CAACQ,SAAS,EAAE;QACxD,MAAM,IAAIV,KAAK,CACb,0EACF,CAAC;MACH;;MAEA;MACA;MACA,MAAMW,aAAa,GAAG,IAAI,CAACT,QAAQ,CAACO,SAAS,CAAC,CAAC,CAAC;MAChD,MAAMG,QAAQ,GAAG,IAAIC,GAAG,CAACF,aAAa,EAAEN,QAAQ,CAACS,GAAG,CAAC,CAACC,IAAI;MAE1D,MAAMC,aAAa,GAAG,MAAM,IAAI,CAACjB,KAAK,CAACa,QAAQ,CAAC;MAChD,IAAI,CAACI,aAAa,CAACV,EAAE,EAAE;QACrB,MAAM,IAAIN,KAAK,CACb,+BAA+BgB,aAAa,CAACT,UAAU,EACzD,CAAC;MACH;MAEA,MAAMU,WAAW,GAAG,MAAMD,aAAa,CAACC,WAAW,CAAC,CAAC;MAErD,IAAI,CAAChB,WAAW,GAAG,MAAM,IAAI,CAACH,YAAY,CAACoB,eAAe,CAACD,WAAW,CAAC;MACvEE,OAAO,CAACC,GAAG,CAAC,mCAAmC,CAAC;IAClD,CAAC,CAAC,OAAOC,KAAK,EAAE;MACdF,OAAO,CAACE,KAAK,CAAC,8BAA8B,EAAEA,KAAK,CAAC;MACpD,MAAMA,KAAK,CAAC,CAAC;IACf;EACF;EAEAC,IAAIA,CAACC,SAAiB,EAAE;IACtB,IAAI,CAAC,IAAI,CAACtB,WAAW,IAAI,CAAC,IAAI,CAACC,QAAQ,EAAE;MACvCiB,OAAO,CAACK,IAAI,CAAC,6CAA6C,CAAC;MAC3D;IACF;;IAEA;IACA,IAAI,IAAI,CAAC1B,YAAY,CAAC2B,KAAK,KAAK,WAAW,EAAE;MAC3C,IAAI,CAAC3B,YAAY,CAAC4B,MAAM,CAAC,CAAC;IAC5B;;IAEA;IACA,MAAMC,KAAK,GAAG,IAAI,CAACzB,QAAQ,CAACQ,SAAS,CAACa,SAAS,CAAC;IAChD,IAAI,CAACI,KAAK,EAAE;MACVR,OAAO,CAACK,IAAI,CAAC,UAAUD,SAAS,2BAA2B,CAAC;MAC5D;IACF;;IAEA;IACA,MAAMK,QAAQ,GAAGD,KAAK,CAACE,GAAG,GAAGF,KAAK,CAACG,KAAK;IAExC,IAAIF,QAAQ,IAAI,CAAC,EAAE;MACjBT,OAAO,CAACK,IAAI,CAAC,UAAUD,SAAS,yBAAyB,CAAC;MAC1D;IACF;IAEA,MAAMQ,MAAM,GAAG,IAAI,CAACjC,YAAY,CAACkC,kBAAkB,CAAC,CAAC;IACrDD,MAAM,CAACE,MAAM,GAAG,IAAI,CAAChC,WAAW;IAEhC,MAAMiC,IAAI,GAAG,IAAI,CAACpC,YAAY,CAACqC,UAAU,CAAC,CAAC;IAC3CD,IAAI,CAACA,IAAI,CAACE,cAAc,CAAC,GAAG,EAAE,IAAI,CAACtC,YAAY,CAACuC,WAAW,CAAC;IAE5DN,MAAM,CAACO,OAAO,CAACJ,IAAI,CAAC;IACpBA,IAAI,CAACI,OAAO,CAAC,IAAI,CAACxC,YAAY,CAACyC,WAAW,CAAC;;IAE3C;IACAR,MAAM,CAACD,KAAK,CACV,CAAC;IAAE;IACHH,KAAK,CAACG,KAAK;IAAE;IACbF,QAAQ,CAAC;IACX,CAAC;EACH;EAEAY,WAAWA,CAAA,EAAG;IACZ,OAAO,IAAI,CAACtC,QAAQ;EACtB;EAEAuC,cAAcA,CAAA,EAAG;IACf,OAAO,IAAI,CAACxC,WAAW;EACzB;AACF","ignoreList":[]}
package/lib/module/package.json
ADDED
@@ -0,0 +1 @@
{"type":"module"}
package/lib/typescript/package.json
ADDED
@@ -0,0 +1 @@
{"type":"module"}
package/lib/typescript/src/index.d.ts
ADDED
@@ -0,0 +1,19 @@
/**
 * Universal player for audio sprites generated by the 'audiosprite' tool.
 * It requires an AudioContext and fetch to be injected.
 */
export declare class AudioSpritePlayer {
  audioContext: any;
  fetch: any;
  audioBuffer: any | null;
  manifest: any | null;
  constructor({ audioContext, fetch }: {
    audioContext: any;
    fetch: any;
  });
  load(jsonPath: string): Promise<void>;
  play(soundName: string): void;
  getManifest(): any;
  getAudioBuffer(): any;
}
//# sourceMappingURL=index.d.ts.map
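The declarations above expose every member as `any`. A consumer who wants stricter typing can narrow the shapes locally; the sketch below is illustrative only, with `SpriteEntry` and `AudioSpriteManifest` inferred from the runtime checks in `src/index.tsx` rather than exported by the package:

```typescript
import { AudioSpritePlayer } from 'react-native-audiosprites';

// Assumed local types mirroring the fields that load() and play() actually read.
interface SpriteEntry {
  start: number; // segment start within the sprite, in seconds
  end: number; // segment end within the sprite, in seconds
  loop?: boolean;
}

interface AudioSpriteManifest {
  resources: string[];
  spritemap: Record<string, SpriteEntry>;
}

export async function listSprites(
  player: AudioSpritePlayer,
  manifestUrl: string
): Promise<string[]> {
  await player.load(manifestUrl);
  const manifest = player.getManifest() as AudioSpriteManifest;
  return Object.keys(manifest.spritemap);
}
```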
package/lib/typescript/src/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.tsx"],"names":[],"mappings":"AAAA;;;GAGG;AACH,qBAAa,iBAAiB;IAC5B,YAAY,EAAE,GAAG,CAAC;IAClB,KAAK,EAAE,GAAG,CAAC;IACX,WAAW,EAAE,GAAG,GAAG,IAAI,CAAC;IACxB,QAAQ,EAAE,GAAG,GAAG,IAAI,CAAC;gBAET,EAAE,YAAY,EAAE,KAAK,EAAE,EAAE;QAAE,YAAY,EAAE,GAAG,CAAC;QAAC,KAAK,EAAE,GAAG,CAAA;KAAE;IAahE,IAAI,CAAC,QAAQ,EAAE,MAAM;IAoC3B,IAAI,CAAC,SAAS,EAAE,MAAM;IA2CtB,WAAW;IAIX,cAAc;CAGf"}
package/package.json
ADDED
@@ -0,0 +1,156 @@
{
  "name": "react-native-audiosprites",
  "version": "0.2.0-alpha.0",
  "description": "audio sprites ",
  "main": "./lib/module/index.js",
  "types": "./lib/typescript/src/index.d.ts",
  "exports": {
    ".": {
      "source": "./src/index.tsx",
      "types": "./lib/typescript/src/index.d.ts",
      "default": "./lib/module/index.js"
    },
    "./package.json": "./package.json"
  },
  "files": [
    "src",
    "lib",
    "android",
    "ios",
    "cpp",
    "*.podspec",
    "react-native.config.js",
    "!ios/build",
    "!android/build",
    "!android/gradle",
    "!android/gradlew",
    "!android/gradlew.bat",
    "!android/local.properties",
    "!**/__tests__",
    "!**/__fixtures__",
    "!**/__mocks__",
    "!**/.*"
  ],
  "scripts": {
    "test": "jest",
    "typecheck": "tsc",
    "lint": "eslint \"**/*.{js,ts,tsx}\"",
    "clean": "del-cli lib",
    "prepare": "bob build",
    "release:alpha": "release-it --preRelease=alpha",
    "release": "release-it --only-version",
    "example": "yarn workspace react-native-audiosprites-example"
  },
  "keywords": [
    "react-native",
    "ios",
    "android"
  ],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/qalqi/react-native-audiosprites.git"
  },
  "author": "qalqi <qalqi@qalqi.com> (https://github.com/qalqi)",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/qalqi/react-native-audiosprites/issues"
  },
  "homepage": "https://github.com/qalqi/react-native-audiosprites#readme",
  "publishConfig": {
    "registry": "https://registry.npmjs.org/"
  },
  "devDependencies": {
    "@commitlint/config-conventional": "^19.8.1",
    "@eslint/compat": "^1.3.2",
    "@eslint/eslintrc": "^3.3.1",
    "@eslint/js": "^9.35.0",
    "@evilmartians/lefthook": "^1.12.3",
    "@react-native/babel-preset": "0.81.1",
    "@react-native/eslint-config": "^0.81.1",
    "@release-it/conventional-changelog": "^10.0.1",
    "@types/jest": "^29.5.14",
    "@types/react": "^19.1.12",
    "commitlint": "^19.8.1",
    "del-cli": "^6.0.0",
    "eslint": "^9.35.0",
    "eslint-config-prettier": "^10.1.8",
    "eslint-plugin-prettier": "^5.5.4",
    "jest": "^29.7.0",
    "prettier": "^3.6.2",
    "react": "19.1.0",
    "react-native": "0.81.5",
    "react-native-audio-api": "^0.9.3",
    "react-native-builder-bob": "^0.40.13",
    "release-it": "^19.0.4",
    "typescript": "^5.9.2"
  },
  "peerDependencies": {
    "react": "*",
    "react-native": "*"
  },
  "workspaces": [
    "example"
  ],
  "packageManager": "yarn@3.6.1",
  "jest": {
    "preset": "react-native",
    "modulePathIgnorePatterns": [
      "<rootDir>/example/node_modules",
      "<rootDir>/lib/"
    ]
  },
  "commitlint": {
    "extends": [
      "@commitlint/config-conventional"
    ]
  },
  "release-it": {
    "git": {
      "commitMessage": "chore: release ${version}",
      "tagName": "v${version}"
    },
    "npm": {
      "publish": true
    },
    "github": {
      "release": true
    },
    "plugins": {
      "@release-it/conventional-changelog": {
        "preset": {
          "name": "angular"
        }
      }
    }
  },
  "prettier": {
    "quoteProps": "consistent",
    "singleQuote": true,
    "tabWidth": 2,
    "trailingComma": "es5",
    "useTabs": false
  },
  "react-native-builder-bob": {
    "source": "src",
    "output": "lib",
    "targets": [
      [
        "module",
        {
          "esm": true
        }
      ],
      [
        "typescript",
        {
          "project": "tsconfig.build.json"
        }
      ]
    ]
  },
  "create-react-native-library": {
    "languages": "js",
    "type": "library",
    "version": "0.54.8"
  }
}
package/src/index.tsx
ADDED
@@ -0,0 +1,110 @@
/**
 * Universal player for audio sprites generated by the 'audiosprite' tool.
 * It requires an AudioContext and fetch to be injected.
 */
export class AudioSpritePlayer {
  audioContext: any;
  fetch: any;
  audioBuffer: any | null;
  manifest: any | null;

  constructor({ audioContext, fetch }: { audioContext: any; fetch: any }) {
    if (!audioContext) {
      throw new Error('An AudioContext instance must be provided.');
    }
    if (!fetch) {
      throw new Error('A fetch implementation must be provided.');
    }
    this.audioContext = audioContext;
    this.fetch = fetch;
    this.audioBuffer = null;
    this.manifest = null;
  }

  async load(jsonPath: string) {
    try {
      const response = await this.fetch(jsonPath);
      if (!response.ok) {
        throw new Error(`Failed to fetch manifest: ${response.statusText}`);
      }
      this.manifest = await response.json();

      if (!this.manifest.resources || !this.manifest.spritemap) {
        throw new Error(
          'Invalid audiosprite manifest format. Missing "resources" or "spritemap".'
        );
      }

      // Find the first supported audio file (e.g., .mp3 or .ogg)
      // For simplicity, we just take the first one.
      const audioFileName = this.manifest.resources[0];
      const audioUrl = new URL(audioFileName, response.url).href;

      const audioResponse = await this.fetch(audioUrl);
      if (!audioResponse.ok) {
        throw new Error(
          `Failed to fetch audio file: ${audioResponse.statusText}`
        );
      }

      const arrayBuffer = await audioResponse.arrayBuffer();

      this.audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
      console.log('Audio sprite loaded successfully.');
    } catch (error) {
      console.error('Failed to load audio sprite:', error);
      throw error; // Re-throw for user to catch
    }
  }

  play(soundName: string) {
    if (!this.audioBuffer || !this.manifest) {
      console.warn('Audio sprite not loaded. Call load() first.');
      return;
    }

    // Resume context if it was suspended (e.g., by browser policy)
    if (this.audioContext.state === 'suspended') {
      this.audioContext.resume();
    }

    // Get the sound from the 'spritemap'
    const sound = this.manifest.spritemap[soundName];
    if (!sound) {
      console.warn(`Sound "${soundName}" not found in spritemap.`);
      return;
    }

    // Calculate duration from start/end times
    const duration = sound.end - sound.start;

    if (duration <= 0) {
      console.warn(`Sound "${soundName}" has invalid duration.`);
      return;
    }

    const source = this.audioContext.createBufferSource();
    source.buffer = this.audioBuffer;

    const gain = this.audioContext.createGain();
    gain.gain.setValueAtTime(1.0, this.audioContext.currentTime);

    source.connect(gain);
    gain.connect(this.audioContext.destination);

    // Use the 'audiosprite' format: start(when, offset, duration)
    source.start(
      0, // Start playing now
      sound.start, // The offset
      duration // The calculated duration
    );
  }

  getManifest() {
    return this.manifest;
  }

  getAudioBuffer() {
    return this.audioBuffer;
  }
}
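Because both `AudioContext` and `fetch` are injected, the class above is straightforward to exercise with test doubles. The package wires up Jest, but this diff ships no specs, so the following is only an illustrative sketch; the import path, URLs, and manifest values are assumptions made up for the example:

```typescript
import { AudioSpritePlayer } from 'react-native-audiosprites';

test('loads a manifest and plays a named sprite', async () => {
  const manifest = {
    resources: ['audiosprite.mp3'],
    spritemap: { Sound_1: { start: 0, end: 1.5, loop: false } },
  };

  // Fake fetch: the first call returns the JSON manifest, the second the audio bytes.
  const fetchMock = jest
    .fn()
    .mockResolvedValueOnce({
      ok: true,
      url: 'https://example.com/sprites/audiosprite.json',
      json: async () => manifest,
    })
    .mockResolvedValueOnce({
      ok: true,
      arrayBuffer: async () => new ArrayBuffer(8),
    });

  // Fake AudioContext exposing only what AudioSpritePlayer touches.
  const source = { buffer: null, connect: jest.fn(), start: jest.fn() };
  const gainNode = { gain: { setValueAtTime: jest.fn() }, connect: jest.fn() };
  const audioContext = {
    state: 'running',
    currentTime: 0,
    destination: {},
    resume: jest.fn(),
    decodeAudioData: jest.fn(async () => ({ duration: 4 })),
    createBufferSource: () => source,
    createGain: () => gainNode,
  };

  const player = new AudioSpritePlayer({ audioContext, fetch: fetchMock });
  await player.load('https://example.com/sprites/audiosprite.json');

  // The audio file is resolved relative to the manifest URL via new URL(...).
  expect(fetchMock).toHaveBeenLastCalledWith(
    'https://example.com/sprites/audiosprite.mp3'
  );

  player.play('Sound_1');
  // start(when, offset, duration): play now, from the sprite offset, for end - start.
  expect(source.start).toHaveBeenCalledWith(0, 0, 1.5);
});
```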